lang (stringclasses, 2 values) | license (stringclasses, 13 values) | stderr (stringlengths 0-343) | commit (stringlengths 40-40) | returncode (int64, 0-128) | repos (stringlengths 6-87.7k) | new_contents (stringlengths 0-6.23M) | new_file (stringlengths 3-311) | old_contents (stringlengths 0-6.23M) | message (stringlengths 6-9.1k) | old_file (stringlengths 3-311) | subject (stringlengths 0-4k) | git_diff (stringlengths 0-6.31M) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | mit | | 6a9856de749f41490dd1eb117e711e5a02abbe5a | 0 | csmith/DMDirc,ShaneMcC/DMDirc-Client,DMDirc/DMDirc,greboid/DMDirc,ShaneMcC/DMDirc-Client,greboid/DMDirc,greboid/DMDirc,csmith/DMDirc,ShaneMcC/DMDirc-Client,greboid/DMDirc,DMDirc/DMDirc,DMDirc/DMDirc,ShaneMcC/DMDirc-Client,csmith/DMDirc,DMDirc/DMDirc,csmith/DMDirc | /*
* Copyright (c) 2006-2010 Chris Smith, Shane Mc Cormack, Gregory Holmes
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.config;
import com.dmdirc.Main;
import com.dmdirc.Precondition;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.updater.Version;
import com.dmdirc.util.ConfigFile;
import com.dmdirc.util.InvalidConfigFileException;
import com.dmdirc.util.MapList;
import com.dmdirc.util.WeakMapList;
import com.dmdirc.util.resourcemanager.ResourceManager;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
/**
* The identity manager manages all known identities, providing easy methods
* to access them.
*
* @author chris
*/
public final class IdentityManager {
/**
* The identities that have been loaded into this manager.
*
* Standard identities are inserted with a <code>null</code> key, custom
* identities use their custom type as the key.
*/
private static final MapList<String, Identity> IDENTITIES
= new MapList<String, Identity>();
/**
* The {@link IdentityListener}s that have registered with this manager.
*
* Listeners for standard identities are inserted with a <code>null</code>
* key, listeners for a specific custom type use their type as the key.
*/
private static final MapList<String, IdentityListener> LISTENERS
= new WeakMapList<String, IdentityListener>();
/** A logger for this class. */
private static final java.util.logging.Logger LOGGER = java.util.logging
.Logger.getLogger(IdentityManager.class.getName());
/** The identity file used for the global config. */
private static Identity config;
/** The identity file used for addon defaults. */
private static Identity addonConfig;
/** The identity file bundled with the client containing version info. */
private static Identity versionConfig;
/** The config manager used for global settings. */
private static ConfigManager globalconfig;
/** Creates a new instance of IdentityManager. */
private IdentityManager() {
}
/**
* Loads all identity files.
*
* @throws InvalidIdentityFileException If there is an error with the config
* file.
*/
public static void load() throws InvalidIdentityFileException {
IDENTITIES.clear();
loadVersion();
loadDefaults();
loadUser();
loadConfig();
if (getCustomIdentities("profile").isEmpty()) {
try {
Identity.buildProfile("Default Profile");
} catch (IOException ex) {
Logger.userError(ErrorLevel.FATAL, "Unable to write default profile", ex);
}
}
// Set up the identity used for the addons defaults
final ConfigTarget target = new ConfigTarget();
target.setGlobalDefault();
target.setOrder(500000);
final ConfigFile addonConfigFile = new ConfigFile((File) null);
final Map<String, String> addonSettings = new HashMap<String, String>();
addonSettings.put("name", "Addon defaults");
addonConfigFile.addDomain("identity", addonSettings);
addonConfig = new Identity(addonConfigFile, target);
IdentityManager.addIdentity(addonConfig);
if (!getGlobalConfig().hasOptionString("identity", "defaultsversion")) {
Logger.userError(ErrorLevel.FATAL, "Default settings "
+ "could not be loaded");
}
}
/** Loads the default (built in) identities. */
private static void loadDefaults() {
final String[] targets = {"default", "modealiases"};
final String dir = getDirectory();
for (String target : targets) {
File file = new File(dir + target);
if (file.exists() && !file.isDirectory()) {
boolean success = false;
for (int i = 0; i < 10 && !success; i++) {
final String suffix = ".old" + (i > 0 ? "-" + i : "");
success = file.renameTo(new File(file.getParentFile(), target + suffix));
}
if (!success) {
Logger.userError(ErrorLevel.HIGH, "Unable to create directory for "
+ "default settings folder (" + target + ")", "A file "
+ "with that name already exists, and couldn't be renamed."
+ " Rename or delete " + file.getAbsolutePath());
continue;
}
}
if (!file.exists() || file.listFiles() == null || file.listFiles().length == 0) {
file.mkdirs();
extractIdentities(target);
}
loadUser(file);
}
extractFormatters();
// If the bundled defaults are newer than the ones the user is
// currently using, extract them.
if (getGlobalConfig().hasOptionString("identity", "defaultsversion")
&& getGlobalConfig().hasOptionString("updater", "bundleddefaultsversion")) {
final Version installedVersion = new Version(getGlobalConfig()
.getOption("identity", "defaultsversion"));
final Version bundledVersion = new Version(getGlobalConfig()
.getOption("updater", "bundleddefaultsversion"));
if (bundledVersion.compareTo(installedVersion) > 0) {
extractIdentities("default");
loadUser(new File(dir, "default"));
}
}
}
private static void extractFormatters() {
try {
ResourceManager.getResourceManager().extractResource(
"com/dmdirc/config/defaults/default/formatter",
getDirectory() + "default/", false);
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to extract default "
+ "formatters: " + ex.getMessage());
}
}
/**
* Extracts the specific set of default identities to the user's identity
* folder.
*
* @param target The target to be extracted
*/
private static void extractIdentities(final String target) {
try {
ResourceManager.getResourceManager().extractResources(
"com/dmdirc/config/defaults/" + target,
getDirectory() + target, false);
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to extract default "
+ "identities: " + ex.getMessage());
}
}
/**
* Retrieves the directory used to store identities in.
*
* @return The identity directory path
*/
public static String getDirectory() {
return Main.getConfigDir() + "identities" + System.getProperty("file.separator");
}
/** Loads user-defined identity files. */
public static void loadUser() {
final File dir = new File(getDirectory());
if (!dir.exists()) {
try {
dir.mkdirs();
dir.createNewFile();
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to create identity dir");
}
}
loadUser(dir);
}
/**
* Recursively loads files from the specified directory.
*
* @param dir The directory to be loaded
*/
@Precondition({
"The specified File is not null",
"The specified File is a directory"
})
private static void loadUser(final File dir) {
Logger.assertTrue(dir != null);
Logger.assertTrue(dir.isDirectory());
if (dir.listFiles() == null) {
Logger.userError(ErrorLevel.MEDIUM,
"Unable to load user identity files from "
+ dir.getAbsolutePath());
} else {
for (File file : dir.listFiles()) {
if (file.isDirectory()) {
loadUser(file);
} else {
loadIdentity(file);
}
}
}
}
/**
* Loads an identity from the specified file. If the identity already
* exists, it is told to reload instead.
*
* @param file The file to load the identity from.
*/
private static void loadIdentity(final File file) {
synchronized (IDENTITIES) {
for (Identity identity : getAllIdentities()) {
if (identity.isFile(file)) {
try {
identity.reload();
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"I/O error when reloading identity file: "
+ file.getAbsolutePath() + " (" + ex.getMessage() + ")");
} catch (InvalidConfigFileException ex) {
// Do nothing
}
return;
}
}
}
try {
addIdentity(new Identity(file, false));
} catch (InvalidIdentityFileException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"Invalid identity file: " + file.getAbsolutePath()
+ " (" + ex.getMessage() + ")");
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"I/O error when reading identity file: "
+ file.getAbsolutePath());
}
}
/**
* Retrieves all known identities.
*
* @return A set of all known identities
* @since 0.6.4
*/
private static Set<Identity> getAllIdentities() {
final Set<Identity> res = new LinkedHashSet<Identity>();
for (Map.Entry<String, List<Identity>> entry : IDENTITIES.entrySet()) {
res.addAll(entry.getValue());
}
return res;
}
/**
* Returns the "group" to which the specified identity belongs. For custom
* identities this is the custom identity type, otherwise this is
* <code>null</code>.
*
* @param identity The identity whose group is being retrieved
* @return The group of the specified identity
* @since 0.6.4
*/
private static String getGroup(final Identity identity) {
return identity.getTarget().getType() == ConfigTarget.TYPE.CUSTOM
? identity.getTarget().getData() : null;
}
/** Loads the version information. */
public static void loadVersion() {
try {
versionConfig = new Identity(Main.class.getResourceAsStream("version.config"), false);
addIdentity(versionConfig);
} catch (IOException ex) {
Logger.appError(ErrorLevel.FATAL, "Unable to load version information", ex);
} catch (InvalidIdentityFileException ex) {
Logger.appError(ErrorLevel.FATAL, "Unable to load version information", ex);
}
}
/**
* Loads the config identity.
*
* @throws InvalidIdentityFileException if there is a problem with the
* config file.
*/
private static void loadConfig() throws InvalidIdentityFileException {
try {
final File file = new File(Main.getConfigDir() + "dmdirc.config");
if (!file.exists()) {
file.createNewFile();
}
config = new Identity(file, true);
config.setOption("identity", "name", "Global config");
addIdentity(config);
} catch (IOException ex) {
Logger.userError(ErrorLevel.FATAL, "I/O error when loading global config: "
+ ex.getMessage(), ex);
}
}
/**
* Retrieves the identity used for the global config.
*
* @return The global config identity
*/
public static Identity getConfigIdentity() {
return config;
}
/**
* Retrieves the identity used for addons defaults.
*
* @return The addons defaults identity
*/
public static Identity getAddonIdentity() {
return addonConfig;
}
/**
* Retrieves the identity bundled with the DMDirc client containing
* version information.
*
* @return The version identity
* @since 0.6.3m2
*/
public static Identity getVersionIdentity() {
return versionConfig;
}
/**
* Saves all modified identity files to disk.
*/
public static void save() {
synchronized (IDENTITIES) {
for (Identity identity : getAllIdentities()) {
identity.save();
}
}
}
/**
* Adds the specific identity to this manager.
* @param identity The identity to be added
*/
@Precondition("The specified Identity is not null")
public static void addIdentity(final Identity identity) {
Logger.assertTrue(identity != null);
final String target = getGroup(identity);
if (IDENTITIES.containsValue(target, identity)) {
removeIdentity(identity);
}
synchronized (IDENTITIES) {
IDENTITIES.add(target, identity);
}
LOGGER.log(Level.FINER, "Adding identity: {0} (group: {1})",
new Object[]{identity, target});
synchronized (LISTENERS) {
for (IdentityListener listener : LISTENERS.safeGet(target)) {
listener.identityAdded(identity);
}
}
}
/**
* Removes an identity from this manager.
* @param identity The identity to be removed
*/
@Precondition({
"The specified Identity is not null",
"The specified Identity has previously been added and not removed"
})
public static void removeIdentity(final Identity identity) {
Logger.assertTrue(identity != null);
final String group = getGroup(identity);
Logger.assertTrue(IDENTITIES.containsValue(group, identity));
synchronized (IDENTITIES) {
IDENTITIES.remove(group, identity);
}
synchronized (LISTENERS) {
for (IdentityListener listener : LISTENERS.safeGet(group)) {
listener.identityRemoved(identity);
}
}
}
/**
* Adds a config manager to this manager.
*
* @param manager The ConfigManager to add
* @deprecated Use {@link #addIdentityListener(com.dmdirc.config.IdentityListener)}
*/
@Deprecated
@Precondition("The specified ConfigManager is not null")
public static void addConfigManager(final ConfigManager manager) {
addIdentityListener(manager);
}
/**
* Adds a new identity listener which will be informed of all settings
* identities which are added to this manager.
*
* @param listener The listener to be added
* @since 0.6.4
*/
@Precondition("The specified listener is not null")
public static void addIdentityListener(final IdentityListener listener) {
addIdentityListener(null, listener);
}
/**
* Adds a new identity listener which will be informed of all identities
* of the specified custom type which are added to this manager.
*
* @param type The type of identities to listen for
* @param listener The listener to be added
* @since 0.6.4
*/
@Precondition("The specified listener is not null")
public static void addIdentityListener(final String type, final IdentityListener listener) {
Logger.assertTrue(listener != null);
synchronized (LISTENERS) {
LISTENERS.add(type, listener);
}
}
/**
* Retrieves a list of identities that serve as profiles.
*
* @return A list of profiles
* @deprecated Use {@link #getCustomIdentities(java.lang.String)} with
* an argument of <code>profile</code> to retrieve profiles.
*/
@Deprecated
public static List<Identity> getProfiles() {
return getCustomIdentities("profile");
}
/**
* Retrieves a list of identities that belong to the specified custom type.
*
* @param type The type of identity to search for
* @return A list of matching identities
* @since 0.6.4
*/
public static List<Identity> getCustomIdentities(final String type) {
return Collections.unmodifiableList(IDENTITIES.safeGet(type));
}
/**
* Retrieves a list of all config sources that should be applied to the
* specified config manager.
*
* @param manager The manager requesting sources
* @return A list of all matching config sources
*/
public static List<Identity> getSources(final ConfigManager manager) {
final List<Identity> sources = new ArrayList<Identity>();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (manager.identityApplies(identity)) {
sources.add(identity);
}
}
}
Collections.sort(sources);
return sources;
}
/**
* Retrieves the global config manager.
*
* @return The global config manager
*/
public static synchronized ConfigManager getGlobalConfig() {
if (globalconfig == null) {
globalconfig = new ConfigManager("", "", "", "");
}
return globalconfig;
}
/**
* Retrieves the config for the specified channel@network. The config is
* created if it doesn't exist.
*
* @param network The name of the network
* @param channel The name of the channel
* @return A config source for the channel
*/
@Precondition({
"The specified network is non-null and not empty",
"The specified channel is non-null and not empty"
})
public static Identity getChannelConfig(final String network, final String channel) {
if (network == null || network.isEmpty()) {
throw new IllegalArgumentException("getChannelConfig called "
+ "with null or empty network\n\nNetwork: " + network);
}
if (channel == null || channel.isEmpty()) {
throw new IllegalArgumentException("getChannelConfig called "
+ "with null or empty channel\n\nChannel: " + channel);
}
final String myTarget = (channel + "@" + network).toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.CHANNEL
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setChannel(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create channel identity", ex);
return null;
}
}
/**
* Retrieves the config for the specified network. The config is
* created if it doesn't exist.
*
* @param network The name of the network
* @return A config source for the network
*/
@Precondition("The specified network is non-null and not empty")
public static Identity getNetworkConfig(final String network) {
if (network == null || network.isEmpty()) {
throw new IllegalArgumentException("getNetworkConfig called "
+ "with null or empty network\n\nNetwork:" + network);
}
final String myTarget = network.toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.NETWORK
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setNetwork(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create network identity", ex);
return null;
}
}
/**
* Retrieves the config for the specified server. The config is
* created if it doesn't exist.
*
* @param server The name of the server
* @return A config source for the server
*/
@Precondition("The specified server is non-null and not empty")
public static Identity getServerConfig(final String server) {
if (server == null || server.isEmpty()) {
throw new IllegalArgumentException("getServerConfig called "
+ "with null or empty server\n\nServer: " + server);
}
final String myTarget = server.toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.SERVER
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setServer(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create network identity", ex);
return null;
}
}
}
| src/com/dmdirc/config/IdentityManager.java | /*
* Copyright (c) 2006-2010 Chris Smith, Shane Mc Cormack, Gregory Holmes
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.config;
import com.dmdirc.Main;
import com.dmdirc.Precondition;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.updater.Version;
import com.dmdirc.util.ConfigFile;
import com.dmdirc.util.InvalidConfigFileException;
import com.dmdirc.util.MapList;
import com.dmdirc.util.WeakMapList;
import com.dmdirc.util.resourcemanager.ResourceManager;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* The identity manager manages all known identities, providing easy methods
* to access them.
*
* @author chris
*/
public final class IdentityManager {
/**
* The identities that have been loaded into this manager.
*
* Standard identities are inserted with a <code>null</code> key, custom
* identities use their custom type as the key.
*/
private static final MapList<String, Identity> IDENTITIES
= new MapList<String, Identity>();
/**
* The {@link IdentityListener}s that have registered with this manager.
*
* Listeners for standard identities are inserted with a <code>null</code>
* key, listeners for a specific custom type use their type as the key.
*/
private static final MapList<String, IdentityListener> LISTENERS
= new WeakMapList<String, IdentityListener>();
/** A logger for this class. */
private static final java.util.logging.Logger LOGGER = java.util.logging
.Logger.getLogger(IdentityManager.class.getName());
/** The identity file used for the global config. */
private static Identity config;
/** The identity file used for addon defaults. */
private static Identity addonConfig;
/** The identity file bundled with the client containing version info. */
private static Identity versionConfig;
/** The config manager used for global settings. */
private static ConfigManager globalconfig;
/** Creates a new instance of IdentityManager. */
private IdentityManager() {
}
/**
* Loads all identity files.
*
* @throws InvalidIdentityFileException If there is an error with the config
* file.
*/
public static void load() throws InvalidIdentityFileException {
IDENTITIES.clear();
IDENTITIES.clear();
loadVersion();
loadDefaults();
loadUser();
loadConfig();
if (getProfiles().size() == 0) {
try {
Identity.buildProfile("Default Profile");
} catch (IOException ex) {
Logger.userError(ErrorLevel.FATAL, "Unable to write default profile", ex);
}
}
// Set up the identity used for the addons defaults
final ConfigTarget target = new ConfigTarget();
target.setGlobalDefault();
target.setOrder(500000);
final ConfigFile addonConfigFile = new ConfigFile((File) null);
final Map<String, String> addonSettings = new HashMap<String, String>();
addonSettings.put("name", "Addon defaults");
addonConfigFile.addDomain("identity", addonSettings);
addonConfig = new Identity(addonConfigFile, target);
IdentityManager.addIdentity(addonConfig);
if (!getGlobalConfig().hasOptionString("identity", "defaultsversion")) {
Logger.userError(ErrorLevel.FATAL, "Default settings "
+ "could not be loaded");
}
}
/** Loads the default (built in) identities. */
private static void loadDefaults() {
final String[] targets = {"default", "modealiases"};
final String dir = getDirectory();
for (String target : targets) {
File file = new File(dir + target);
if (file.exists() && !file.isDirectory()) {
boolean success = false;
for (int i = 0; i < 10 && !success; i++) {
final String suffix = ".old" + (i > 0 ? "-" + i : "");
success = file.renameTo(new File(file.getParentFile(), target + suffix));
}
if (!success) {
Logger.userError(ErrorLevel.HIGH, "Unable to create directory for "
+ "default settings folder (" + target + ")", "A file "
+ "with that name already exists, and couldn't be renamed."
+ " Rename or delete " + file.getAbsolutePath());
continue;
}
}
if (!file.exists() || file.listFiles() == null || file.listFiles().length == 0) {
file.mkdirs();
extractIdentities(target);
}
loadUser(file);
}
extractFormatters();
// If the bundled defaults are newer than the ones the user is
// currently using, extract them.
if (getGlobalConfig().hasOptionString("identity", "defaultsversion")
&& getGlobalConfig().hasOptionString("updater", "bundleddefaultsversion")) {
final Version installedVersion = new Version(getGlobalConfig()
.getOption("identity", "defaultsversion"));
final Version bundledVersion = new Version(getGlobalConfig()
.getOption("updater", "bundleddefaultsversion"));
if (bundledVersion.compareTo(installedVersion) > 0) {
extractIdentities("default");
loadUser(new File(dir, "default"));
}
}
}
private static void extractFormatters() {
try {
ResourceManager.getResourceManager().extractResource(
"com/dmdirc/config/defaults/default/formatter",
getDirectory() + "default/", false);
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to extract default "
+ "formatters: " + ex.getMessage());
}
}
/**
* Extracts the specific set of default identities to the user's identity
* folder.
*
* @param target The target to be extracted
*/
private static void extractIdentities(final String target) {
try {
ResourceManager.getResourceManager().extractResources(
"com/dmdirc/config/defaults/" + target,
getDirectory() + target, false);
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to extract default "
+ "identities: " + ex.getMessage());
}
}
/**
* Retrieves the directory used to store identities in.
*
* @return The identity directory path
*/
public static String getDirectory() {
return Main.getConfigDir() + "identities" + System.getProperty("file.separator");
}
/** Loads user-defined identity files. */
public static void loadUser() {
final File dir = new File(getDirectory());
if (!dir.exists()) {
try {
dir.mkdirs();
dir.createNewFile();
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM, "Unable to create identity dir");
}
}
loadUser(dir);
}
/**
* Recursively loads files from the specified directory.
*
* @param dir The directory to be loaded
*/
@Precondition({
"The specified File is not null",
"The specified File is a directory"
})
private static void loadUser(final File dir) {
Logger.assertTrue(dir != null);
Logger.assertTrue(dir.isDirectory());
if (dir.listFiles() == null) {
Logger.userError(ErrorLevel.MEDIUM,
"Unable to load user identity files from "
+ dir.getAbsolutePath());
} else {
for (File file : dir.listFiles()) {
if (file.isDirectory()) {
loadUser(file);
} else {
loadIdentity(file);
}
}
}
}
/**
* Loads an identity from the specified file. If the identity already
* exists, it is told to reload instead.
*
* @param file The file to load the identity from.
*/
private static void loadIdentity(final File file) {
synchronized (IDENTITIES) {
for (Identity identity : getAllIdentities()) {
if (identity.isFile(file)) {
try {
identity.reload();
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"I/O error when reloading identity file: "
+ file.getAbsolutePath() + " (" + ex.getMessage() + ")");
} catch (InvalidConfigFileException ex) {
// Do nothing
}
return;
}
}
}
try {
addIdentity(new Identity(file, false));
} catch (InvalidIdentityFileException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"Invalid identity file: " + file.getAbsolutePath()
+ " (" + ex.getMessage() + ")");
} catch (IOException ex) {
Logger.userError(ErrorLevel.MEDIUM,
"I/O error when reading identity file: "
+ file.getAbsolutePath());
}
}
/**
* Retrieves all known identities.
*
* @return A set of all known identities
* @since 0.6.4
*/
private static Set<Identity> getAllIdentities() {
final Set<Identity> res = new LinkedHashSet<Identity>();
for (Map.Entry<String, List<Identity>> entry : IDENTITIES.entrySet()) {
res.addAll(entry.getValue());
}
return res;
}
/**
* Returns the "group" to which the specified identity belongs. For custom
* identities this is the custom identity type, otherwise this is
* <code>null</code>.
*
* @param identity The identity whose group is being retrieved
* @return The group of the specified identity
* @since 0.6.4
*/
private static String getGroup(final Identity identity) {
return identity.getTarget().getType() == ConfigTarget.TYPE.CUSTOM
? identity.getTarget().getData() : null;
}
/** Loads the version information. */
public static void loadVersion() {
try {
versionConfig = new Identity(Main.class.getResourceAsStream("version.config"), false);
addIdentity(versionConfig);
} catch (IOException ex) {
Logger.appError(ErrorLevel.FATAL, "Unable to load version information", ex);
} catch (InvalidIdentityFileException ex) {
Logger.appError(ErrorLevel.FATAL, "Unable to load version information", ex);
}
}
/**
* Loads the config identity.
*
* @throws InvalidIdentityFileException if there is a problem with the
* config file.
*/
private static void loadConfig() throws InvalidIdentityFileException {
try {
final File file = new File(Main.getConfigDir() + "dmdirc.config");
if (!file.exists()) {
file.createNewFile();
}
config = new Identity(file, true);
config.setOption("identity", "name", "Global config");
addIdentity(config);
} catch (IOException ex) {
Logger.userError(ErrorLevel.FATAL, "I/O error when loading global config: "
+ ex.getMessage(), ex);
}
}
/**
* Retrieves the identity used for the global config.
*
* @return The global config identity
*/
public static Identity getConfigIdentity() {
return config;
}
/**
* Retrieves the identity used for addons defaults.
*
* @return The addons defaults identity
*/
public static Identity getAddonIdentity() {
return addonConfig;
}
/**
* Retrieves the identity bundled with the DMDirc client containing
* version information.
*
* @return The version identity
* @since 0.6.3m2
*/
public static Identity getVersionIdentity() {
return versionConfig;
}
/**
* Saves all modified identity files to disk.
*/
public static void save() {
synchronized (IDENTITIES) {
for (Identity identity : getAllIdentities()) {
identity.save();
}
}
}
/**
* Adds the specific identity to this manager.
* @param identity The identity to be added
*/
@Precondition("The specified Identity is not null")
public static void addIdentity(final Identity identity) {
Logger.assertTrue(identity != null);
final String target = getGroup(identity);
if (IDENTITIES.containsValue(target, identity)) {
removeIdentity(identity);
}
synchronized (IDENTITIES) {
IDENTITIES.add(target, identity);
}
LOGGER.finer("Adding identity: " + identity + " (group: " + target + ")");
synchronized (LISTENERS) {
for (IdentityListener listener : LISTENERS.safeGet(target)) {
listener.identityAdded(identity);
}
}
}
/**
* Removes an identity from this manager.
* @param identity The identity to be removed
*/
@Precondition({
"The specified Identity is not null",
"The specified Identity has previously been added and not removed"
})
public static void removeIdentity(final Identity identity) {
Logger.assertTrue(identity != null);
final String group = getGroup(identity);
Logger.assertTrue(IDENTITIES.containsValue(group, identity));
synchronized (IDENTITIES) {
IDENTITIES.remove(group, identity);
}
synchronized (LISTENERS) {
for (IdentityListener listener : LISTENERS.safeGet(group)) {
listener.identityRemoved(identity);
}
}
}
/**
* Adds a config manager to this manager.
*
* @param manager The ConfigManager to add
* @deprecated Use {@link #addIdentityListener(com.dmdirc.config.IdentityListener)}
*/
@Deprecated
@Precondition("The specified ConfigManager is not null")
public static void addConfigManager(final ConfigManager manager) {
addIdentityListener(manager);
}
/**
* Adds a new identity listener which will be informed of all settings
* identities which are added to this manager.
*
* @param listener The listener to be added
* @since 0.6.4
*/
@Precondition("The specified listener is not null")
public static void addIdentityListener(final IdentityListener listener) {
addIdentityListener(null, listener);
}
/**
* Adds a new identity listener which will be informed of all identities
* of the specified custom type which are added to this manager.
*
* @param type The type of identities to listen for
* @param listener The listener to be added
* @since 0.6.4
*/
@Precondition("The specified listener is not null")
public static void addIdentityListener(final String type, final IdentityListener listener) {
Logger.assertTrue(listener != null);
synchronized (LISTENERS) {
LISTENERS.add(type, listener);
}
}
/**
* Retrieves a list of identities that serve as profiles.
*
* @return A list of profiles
* @deprecated Use {@link #getCustomIdentities(java.lang.String)} with
* an argument of <code>profile</code> to retrieve profiles.
*/
@Deprecated
public static List<Identity> getProfiles() {
return getCustomIdentities("profile");
}
/**
* Retrieves a list of identities that belong to the specified custom type.
*
* @param type The type of identity to search for
* @return A list of matching identities
* @since 0.6.4
*/
public static List<Identity> getCustomIdentities(final String type) {
return Collections.unmodifiableList(IDENTITIES.safeGet(type));
}
/**
* Retrieves a list of all config sources that should be applied to the
* specified config manager.
*
* @param manager The manager requesting sources
* @return A list of all matching config sources
*/
public static List<Identity> getSources(final ConfigManager manager) {
final List<Identity> sources = new ArrayList<Identity>();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (manager.identityApplies(identity)) {
sources.add(identity);
}
}
}
Collections.sort(sources);
return sources;
}
/**
* Retrieves the global config manager.
*
* @return The global config manager
*/
public static synchronized ConfigManager getGlobalConfig() {
if (globalconfig == null) {
globalconfig = new ConfigManager("", "", "", "");
}
return globalconfig;
}
/**
* Retrieves the config for the specified channel@network. The config is
* created if it doesn't exist.
*
* @param network The name of the network
* @param channel The name of the channel
* @return A config source for the channel
*/
@Precondition({
"The specified network is non-null and not empty",
"The specified channel is non-null and not empty"
})
public static Identity getChannelConfig(final String network, final String channel) {
if (network == null || network.isEmpty()) {
throw new IllegalArgumentException("getChannelConfig called "
+ "with null or empty network\n\nNetwork: " + network);
}
if (channel == null || channel.isEmpty()) {
throw new IllegalArgumentException("getChannelConfig called "
+ "with null or empty channel\n\nChannel: " + channel);
}
final String myTarget = (channel + "@" + network).toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.CHANNEL
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setChannel(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create channel identity", ex);
return null;
}
}
/**
* Retrieves the config for the specified network. The config is
* created if it doesn't exist.
*
* @param network The name of the network
* @return A config source for the network
*/
@Precondition("The specified network is non-null and not empty")
public static Identity getNetworkConfig(final String network) {
if (network == null || network.isEmpty()) {
throw new IllegalArgumentException("getNetworkConfig called "
+ "with null or empty network\n\nNetwork:" + network);
}
final String myTarget = network.toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.NETWORK
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setNetwork(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create network identity", ex);
return null;
}
}
/**
* Retrieves the config for the specified server. The config is
* created if it doesn't exist.
*
* @param server The name of the server
* @return A config source for the server
*/
@Precondition("The specified server is non-null and not empty")
public static Identity getServerConfig(final String server) {
if (server == null || server.isEmpty()) {
throw new IllegalArgumentException("getServerConfig called "
+ "with null or empty server\n\nServer: " + server);
}
final String myTarget = server.toLowerCase();
synchronized (IDENTITIES) {
for (Identity identity : IDENTITIES.safeGet(null)) {
if (identity.getTarget().getType() == ConfigTarget.TYPE.SERVER
&& identity.getTarget().getData().equalsIgnoreCase(myTarget)) {
return identity;
}
}
}
// We need to create one
final ConfigTarget target = new ConfigTarget();
target.setServer(myTarget);
try {
return Identity.buildIdentity(target);
} catch (IOException ex) {
Logger.userError(ErrorLevel.HIGH, "Unable to create network identity", ex);
return null;
}
}
}
| Fix some stupidities
Change-Id: I34118f2593a0499cd5d912b554158b4a146a5394
Reviewed-on: http://gerrit.dmdirc.com/1709
Reviewed-by: Greg Holmes <[email protected]>
Automatic-Compile: DMDirc Local Commits <[email protected]>
| src/com/dmdirc/config/IdentityManager.java | Fix some stupidities | <ide><path>rc/com/dmdirc/config/IdentityManager.java
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Set;
<add>import java.util.logging.Level;
<ide>
<ide> /**
<ide> * The identity manager manages all known identities, providing easy methods
<ide> */
<ide> public static void load() throws InvalidIdentityFileException {
<ide> IDENTITIES.clear();
<del> IDENTITIES.clear();
<ide>
<ide> loadVersion();
<ide> loadDefaults();
<ide> loadUser();
<ide> loadConfig();
<ide>
<del> if (getProfiles().size() == 0) {
<add> if (getCustomIdentities("profile").isEmpty()) {
<ide> try {
<ide> Identity.buildProfile("Default Profile");
<ide> } catch (IOException ex) {
<ide> IDENTITIES.add(target, identity);
<ide> }
<ide>
<del> LOGGER.finer("Adding identity: " + identity + " (group: " + target + ")");
<add> LOGGER.log(Level.FINER, "Adding identity: {0} (group: {1})",
<add> new Object[]{identity, target});
<ide>
<ide> synchronized (LISTENERS) {
<ide> for (IdentityListener listener : LISTENERS.safeGet(target)) { |
|
Java | apache-2.0 | ee6cbcd05fda6d489c572c279b0826193a7836a3 | 0 | dgrove727/autopsy,dgrove727/autopsy,APriestman/autopsy,wschaeferB/autopsy,esaunders/autopsy,rcordovano/autopsy,dgrove727/autopsy,millmanorama/autopsy,esaunders/autopsy,narfindustries/autopsy,narfindustries/autopsy,esaunders/autopsy,wschaeferB/autopsy,esaunders/autopsy,rcordovano/autopsy,APriestman/autopsy,rcordovano/autopsy,narfindustries/autopsy,millmanorama/autopsy,esaunders/autopsy,APriestman/autopsy,rcordovano/autopsy,millmanorama/autopsy,APriestman/autopsy,wschaeferB/autopsy,millmanorama/autopsy,APriestman/autopsy,rcordovano/autopsy,wschaeferB/autopsy,APriestman/autopsy,wschaeferB/autopsy,rcordovano/autopsy,APriestman/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2012-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskException;
/**
* Support for TSK_EMAIL_MSG nodes and displaying emails in the directory tree.
* Email messages are grouped into parent folders, and the folders are grouped
* into parent accounts if TSK_PATH is available to define the relationship
* structure for every message.
*/
public class EmailExtracted implements AutopsyVisitableItem {
private static final String LABEL_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getLabel();
private static final String DISPLAY_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getDisplayName();
private static final Logger logger = Logger.getLogger(EmailExtracted.class.getName());
private static final String MAIL_ACCOUNT = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailAccount.text");
private static final String MAIL_FOLDER = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailFolder.text");
private static final String MAIL_PATH_SEPARATOR = "/";
private SleuthkitCase skCase;
private final EmailResults emailResults;
public EmailExtracted(SleuthkitCase skCase) {
this.skCase = skCase;
emailResults = new EmailResults();
}
private final class EmailResults extends Observable {
private final Map<String, Map<String, List<Long>>> accounts = new LinkedHashMap<>();
EmailResults() {
update();
}
public Set<String> getAccounts() {
return accounts.keySet();
}
public Set<String> getFolders(String account) {
return accounts.get(account).keySet();
}
public List<Long> getArtifactIds(String account, String folder) {
return accounts.get(account).get(folder);
}
@SuppressWarnings("deprecation")
public void update() {
accounts.clear();
if (skCase == null) {
return;
}
int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID();
int pathAttrId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH.getTypeID();
String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS
+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
+ "attribute_type_id=" + pathAttrId //NON-NLS
+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
+ " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
while (resultSet.next()) {
final String path = resultSet.getString("value_text"); //NON-NLS
final long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
final Map<String, String> parsedPath = parsePath(path);
final String account = parsedPath.get(MAIL_ACCOUNT);
final String folder = parsedPath.get(MAIL_FOLDER);
Map<String, List<Long>> folders = accounts.get(account);
if (folders == null) {
folders = new LinkedHashMap<>();
accounts.put(account, folders);
}
List<Long> messages = folders.get(folder);
if (messages == null) {
messages = new ArrayList<>();
folders.put(folder, messages);
}
messages.add(artifactId);
}
} catch (TskCoreException | SQLException ex) {
logger.log(Level.WARNING, "Cannot initialize email extraction: ", ex); //NON-NLS
}
setChanged();
notifyObservers();
}
private Map<String, String> parsePath(String path) {
Map<String, String> parsed = new HashMap<>();
String[] split = path.split(MAIL_PATH_SEPARATOR);
if (split.length < 4) {
parsed.put(MAIL_ACCOUNT, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultAcct.text"));
parsed.put(MAIL_FOLDER, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultFolder.text"));
return parsed;
}
parsed.put(MAIL_ACCOUNT, split[2]);
parsed.put(MAIL_FOLDER, split[3]);
return parsed;
}
}
@Override
public <T> T accept(AutopsyItemVisitor<T> v) {
return v.visit(this);
}
/**
* Mail root node grouping all mail accounts, supports account-> folder
* structure
*/
public class RootNode extends DisplayableItemNode {
public RootNode() {
super(Children.create(new AccountFactory(), true), Lookups.singleton(DISPLAY_NAME));
super.setName(LABEL_NAME);
super.setDisplayName(DISPLAY_NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/mail-icon-16.png"); //NON-NLS
emailResults.update();
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
/*
* TODO (AUT-1849): Correct or remove peristent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedRoot"; //NON-NLS
// }
}
/**
* Mail root child node creating each account node
*/
private class AccountFactory extends ChildFactory.Detachable<String> implements Observer {
/*
* The pcl is in the class because it has the easiest mechanisms to add
* and remove itself during its life cycles.
*/
private final PropertyChangeListener pcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked
* out. Currently, remote events may be received for a case
* that is already closed.
*/
try {
Case.getCurrentCase();
/**
* Even with the check above, it is still possible that
* the case will be closed in a different thread before
* this code executes. If that happens, it is possible
* for the event to have a null oldValue.
*/
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()) {
emailResults.update();
}
} catch (IllegalStateException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked
* out. Currently, remote events may be received for a case
* that is already closed.
*/
try {
Case.getCurrentCase();
emailResults.update();
} catch (IllegalStateException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
// case was closed. Remove listeners so that we don't get called with a stale case handle
if (evt.getNewValue() == null) {
removeNotify();
skCase = null;
}
}
}
};
@Override
protected void addNotify() {
IngestManager.getInstance().addIngestJobEventListener(pcl);
IngestManager.getInstance().addIngestModuleEventListener(pcl);
Case.addPropertyChangeListener(pcl);
emailResults.update();
emailResults.addObserver(this);
}
@Override
protected void removeNotify() {
IngestManager.getInstance().removeIngestJobEventListener(pcl);
IngestManager.getInstance().removeIngestModuleEventListener(pcl);
Case.removePropertyChangeListener(pcl);
emailResults.deleteObserver(this);
}
@Override
protected boolean createKeys(List<String> list) {
list.addAll(emailResults.getAccounts());
return true;
}
@Override
protected Node createNodeForKey(String key) {
return new AccountNode(key);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
/**
* Account node representation
*/
public class AccountNode extends DisplayableItemNode implements Observer {
private final String accountName;
public AccountNode(String accountName) {
super(Children.create(new FolderFactory(accountName), true), Lookups.singleton(accountName));
super.setName(accountName);
this.accountName = accountName;
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/account-icon-16.png"); //NON-NLS
updateDisplayName();
emailResults.addObserver(this);
}
private void updateDisplayName() {
super.setDisplayName(accountName + " (" + emailResults.getFolders(accountName) + ")");
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
/*
* TODO (AUT-1849): Correct or remove peristent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedAccount"; //NON-NLS
// }
}
/**
* Account node child creating sub nodes for every folder
*/
private class FolderFactory extends ChildFactory<String> implements Observer {
private final String accountName;
private FolderFactory(String accountName) {
super();
this.accountName = accountName;
emailResults.addObserver(this);
}
@Override
protected boolean createKeys(List<String> list) {
list.addAll(emailResults.getFolders(accountName));
return true;
}
@Override
protected Node createNodeForKey(String folderName) {
return new FolderNode(accountName, folderName);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
/**
* Node representing mail folder
*/
public class FolderNode extends DisplayableItemNode implements Observer {
private final String accountName;
private final String folderName;
public FolderNode(String accountName, String folderName) {
super(Children.create(new MessageFactory(accountName, folderName), true), Lookups.singleton(accountName));
super.setName(folderName);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-16.png"); //NON-NLS
this.accountName = accountName;
this.folderName = folderName;
updateDisplayName();
emailResults.addObserver(this);
}
private void updateDisplayName() {
super.setDisplayName(folderName + " (" + emailResults.getArtifactIds(accountName, folderName).size() + ")");
}
@Override
public boolean isLeafTypeNode() {
return true;
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
/*
* TODO (AUT-1849): Correct or remove peristent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedFolder"; //NON-NLS
// }
}
/**
* Node representing mail folder content (mail messages)
*/
private class MessageFactory extends ChildFactory<Long> implements Observer {
private final String accountName;
private final String folderName;
private MessageFactory(String accountName, String folderName) {
super();
this.accountName = accountName;
this.folderName = folderName;
emailResults.addObserver(this);
}
@Override
protected boolean createKeys(List<Long> list) {
list.addAll(emailResults.getArtifactIds(accountName, folderName));
return true;
}
@Override
protected Node createNodeForKey(Long artifactId) {
if (skCase == null) {
return null;
}
try {
BlackboardArtifact artifact = skCase.getBlackboardArtifact(artifactId);
return new BlackboardArtifactNode(artifact);
} catch (TskException ex) {
logger.log(Level.WARNING, "Error creating mail messages nodes", ex); //NON-NLS
}
return null;
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
}
| Core/src/org/sleuthkit/autopsy/datamodel/EmailExtracted.java | /*
* Autopsy Forensic Browser
*
* Copyright 2012-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskException;
/**
* Support for TSK_EMAIL_MSG nodes and displaying emails in the directory tree.
* Email messages are grouped into parent folders, and the folders are grouped
* into parent accounts if TSK_PATH is available to define the relationship
* structure for every message.
*/
public class EmailExtracted implements AutopsyVisitableItem {
private static final String LABEL_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getLabel();
private static final String DISPLAY_NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getDisplayName();
private static final Logger logger = Logger.getLogger(EmailExtracted.class.getName());
private static final String MAIL_ACCOUNT = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailAccount.text");
private static final String MAIL_FOLDER = NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.mailFolder.text");
private static final String MAIL_PATH_SEPARATOR = "/";
private SleuthkitCase skCase;
private final EmailResults emailResults;
public EmailExtracted(SleuthkitCase skCase) {
this.skCase = skCase;
emailResults = new EmailResults();
}
private final class EmailResults extends Observable {
private final Map<String, Map<String, List<Long>>> accounts = new LinkedHashMap<>();
EmailResults() {
update();
}
public Set<String> getAccounts() {
return accounts.keySet();
}
public Set<String> getFolders(String account) {
return accounts.get(account).keySet();
}
public List<Long> getArtifactIds(String account, String folder) {
return accounts.get(account).get(folder);
}
@SuppressWarnings("deprecation")
public void update() {
accounts.clear();
if (skCase == null) {
return;
}
int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID();
int pathAttrId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH.getTypeID();
String query = "SELECT value_text,blackboard_attributes.artifact_id,attribute_type_id " //NON-NLS
+ "FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
+ "attribute_type_id=" + pathAttrId //NON-NLS
+ " AND blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id" //NON-NLS
+ " AND blackboard_artifacts.artifact_type_id=" + artId; //NON-NLS
try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
ResultSet resultSet = dbQuery.getResultSet();
while (resultSet.next()) {
final String path = resultSet.getString("value_text"); //NON-NLS
final long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
final Map<String, String> parsedPath = parsePath(path);
final String account = parsedPath.get(MAIL_ACCOUNT);
final String folder = parsedPath.get(MAIL_FOLDER);
Map<String, List<Long>> folders = accounts.get(account);
if (folders == null) {
folders = new LinkedHashMap<>();
accounts.put(account, folders);
}
List<Long> messages = folders.get(folder);
if (messages == null) {
messages = new ArrayList<>();
folders.put(folder, messages);
}
messages.add(artifactId);
}
} catch (TskCoreException | SQLException ex) {
logger.log(Level.WARNING, "Cannot initialize email extraction: ", ex); //NON-NLS
}
}
private Map<String, String> parsePath(String path) {
Map<String, String> parsed = new HashMap<>();
String[] split = path.split(MAIL_PATH_SEPARATOR);
if (split.length < 4) {
parsed.put(MAIL_ACCOUNT, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultAcct.text"));
parsed.put(MAIL_FOLDER, NbBundle.getMessage(EmailExtracted.class, "EmailExtracted.defaultFolder.text"));
return parsed;
}
parsed.put(MAIL_ACCOUNT, split[2]);
parsed.put(MAIL_FOLDER, split[3]);
return parsed;
}
}
@Override
public <T> T accept(AutopsyItemVisitor<T> v) {
return v.visit(this);
}
/**
* Mail root node grouping all mail accounts, supports account-> folder
* structure
*/
public class RootNode extends DisplayableItemNode {
public RootNode() {
super(Children.create(new AccountFactory(), true), Lookups.singleton(DISPLAY_NAME));
super.setName(LABEL_NAME);
super.setDisplayName(DISPLAY_NAME);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/mail-icon-16.png"); //NON-NLS
emailResults.update();
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
/*
         * TODO (AUT-1849): Correct or remove persistent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedRoot"; //NON-NLS
// }
}
/**
* Mail root child node creating each account node
*/
private class AccountFactory extends ChildFactory.Detachable<String> implements Observer {
/*
* The pcl is in the class because it has the easiest mechanisms to add
* and remove itself during its life cycles.
*/
private final PropertyChangeListener pcl = new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
String eventType = evt.getPropertyName();
if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked
* out. Currently, remote events may be received for a case
* that is already closed.
*/
try {
Case.getCurrentCase();
/**
* Even with the check above, it is still possible that
* the case will be closed in a different thread before
* this code executes. If that happens, it is possible
* for the event to have a null oldValue.
*/
ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
if (null != eventData && eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()) {
emailResults.update();
}
} catch (IllegalStateException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
/**
* Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked
* out. Currently, remote events may be received for a case
* that is already closed.
*/
try {
Case.getCurrentCase();
emailResults.update();
} catch (IllegalStateException notUsed) {
/**
* Case is closed, do nothing.
*/
}
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
// case was closed. Remove listeners so that we don't get called with a stale case handle
if (evt.getNewValue() == null) {
removeNotify();
skCase = null;
}
}
}
};
@Override
protected void addNotify() {
IngestManager.getInstance().addIngestJobEventListener(pcl);
IngestManager.getInstance().addIngestModuleEventListener(pcl);
Case.addPropertyChangeListener(pcl);
emailResults.update();
emailResults.addObserver(this);
}
@Override
protected void removeNotify() {
IngestManager.getInstance().removeIngestJobEventListener(pcl);
IngestManager.getInstance().removeIngestModuleEventListener(pcl);
Case.removePropertyChangeListener(pcl);
emailResults.deleteObserver(this);
}
@Override
protected boolean createKeys(List<String> list) {
list.addAll(emailResults.getAccounts());
return true;
}
@Override
protected Node createNodeForKey(String key) {
return new AccountNode(key);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
/**
* Account node representation
*/
public class AccountNode extends DisplayableItemNode implements Observer {
private final String accountName;
public AccountNode(String accountName) {
super(Children.create(new FolderFactory(accountName), true), Lookups.singleton(accountName));
super.setName(accountName);
this.accountName = accountName;
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/account-icon-16.png"); //NON-NLS
updateDisplayName();
emailResults.addObserver(this);
}
private void updateDisplayName() {
super.setDisplayName(accountName + " (" + emailResults.getFolders(accountName) + ")");
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
@Override
public boolean isLeafTypeNode() {
return false;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
/*
         * TODO (AUT-1849): Correct or remove persistent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedAccount"; //NON-NLS
// }
}
/**
* Account node child creating sub nodes for every folder
*/
private class FolderFactory extends ChildFactory<String> implements Observer {
private final String accountName;
private FolderFactory(String accountName) {
super();
this.accountName = accountName;
emailResults.addObserver(this);
}
@Override
protected boolean createKeys(List<String> list) {
list.addAll(emailResults.getFolders(accountName));
return true;
}
@Override
protected Node createNodeForKey(String folderName) {
return new FolderNode(accountName, folderName);
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
/**
* Node representing mail folder
*/
public class FolderNode extends DisplayableItemNode implements Observer {
private final String accountName;
private final String folderName;
public FolderNode(String accountName, String folderName) {
super(Children.create(new MessageFactory(accountName, folderName), true), Lookups.singleton(accountName));
super.setName(folderName);
this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/folder-icon-16.png"); //NON-NLS
this.accountName = accountName;
this.folderName = folderName;
updateDisplayName();
emailResults.addObserver(this);
}
private void updateDisplayName() {
super.setDisplayName(folderName + " (" + emailResults.getArtifactIds(accountName, folderName).size() + ")");
}
@Override
public boolean isLeafTypeNode() {
return true;
}
@Override
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
}
ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.name"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.displayName"),
NbBundle.getMessage(this.getClass(), "EmailExtracted.createSheet.name.desc"),
getName()));
return s;
}
@Override
public <T> T accept(DisplayableItemNodeVisitor<T> v) {
return v.visit(this);
}
@Override
public void update(Observable o, Object arg) {
updateDisplayName();
}
/*
         * TODO (AUT-1849): Correct or remove persistent column reordering code
*
* Added to support this feature.
*/
// @Override
// public String getItemType() {
// return "EmailExtractedFolder"; //NON-NLS
// }
}
/**
* Node representing mail folder content (mail messages)
*/
private class MessageFactory extends ChildFactory<Long> implements Observer {
private final String accountName;
private final String folderName;
private MessageFactory(String accountName, String folderName) {
super();
this.accountName = accountName;
this.folderName = folderName;
emailResults.addObserver(this);
}
@Override
protected boolean createKeys(List<Long> list) {
list.addAll(emailResults.getArtifactIds(accountName, folderName));
return true;
}
@Override
protected Node createNodeForKey(Long artifactId) {
if (skCase == null) {
return null;
}
try {
BlackboardArtifact artifact = skCase.getBlackboardArtifact(artifactId);
return new BlackboardArtifactNode(artifact);
} catch (TskException ex) {
logger.log(Level.WARNING, "Error creating mail messages nodes", ex); //NON-NLS
}
return null;
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
}
| Fix bug in email artifacts refresh
| Core/src/org/sleuthkit/autopsy/datamodel/EmailExtracted.java | Fix bug in email artifacts refresh | <ide><path>ore/src/org/sleuthkit/autopsy/datamodel/EmailExtracted.java
<ide> } catch (TskCoreException | SQLException ex) {
<ide> logger.log(Level.WARNING, "Cannot initialize email extraction: ", ex); //NON-NLS
<ide> }
<add> setChanged();
<add> notifyObservers();
<ide> }
<ide>
<ide> private Map<String, String> parsePath(String path) { |
|
Java | apache-2.0 | c495f288e2f84b31978a6684a900af0f4b614dff | 0 | jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics | /*
* Java Genetic Algorithm Library (@__identifier__@).
* Copyright (c) @__year__@ Franz Wilhelmstötter
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author:
* Franz Wilhelmstötter ([email protected])
*
*/
package org.jenetics.util;
import static org.jenetics.util.object.nonNull;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
* @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
* @since 1.0
* @version 1.3 — <em>$Date: 2013-06-03 $</em>
*/
abstract class ArraySeq<T> implements Seq<T>, Serializable {
private static final long serialVersionUID = 1L;
transient ArrayRef _array;
transient int _start;
transient int _end;
transient int _length;
/**
* <i>Universal</i> array constructor.
*
* @param array the array which holds the elements. The array will not be
* copied.
	 * @param start the start index of the given array (inclusively).
* @param end the end index of the given array (exclusively)
* @throws NullPointerException if the given {@code array} is {@code null}.
	 * @throws IndexOutOfBoundsException for an illegal start/end point index
	 *         value ({@code start < 0 || end > array.length || start > end}).
*/
ArraySeq(final ArrayRef array, final int start, final int end) {
nonNull(array, "Array");
if (start < 0 || end > array.length || start > end) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Invalid index range: [%d, %s)", start, end
));
}
_array = array;
_start = start;
_end = end;
_length = _end - _start;
}
ArraySeq(final int length) {
this(new ArrayRef(length), 0, length);
}
@Override
@SuppressWarnings("unchecked")
public T get(final int index) {
checkIndex(index);
return (T)_array.data[index + _start];
}
@Override
public int indexOf(final Object element) {
return indexOf(element, 0, length());
}
@Override
public int indexOf(final Object element, final int start) {
return indexOf(element, start, length());
}
@Override
public int indexOf(final Object element, final int start, final int end) {
checkIndex(start, end);
final int n = end + _start;
int index = -1;
if (element == null) {
for (int i = start + _start; i < n && index == -1; ++i) {
if (_array.data[i] == null) {
index = i - _start;
}
}
} else {
for (int i = _start + start; i < n && index == -1; ++i) {
if (element.equals(_array.data[i])) {
index = i - _start;
}
}
}
return index;
}
@Override
public int indexWhere(final Function<? super T, Boolean> predicate) {
return indexWhere(predicate, 0, length());
}
@Override
public int indexWhere(
final Function<? super T, Boolean> predicate,
final int start
) {
return indexWhere(predicate, start, length());
}
@Override
public int indexWhere(
final Function<? super T, Boolean> predicate,
final int start,
final int end
) {
nonNull(predicate, "Predicate");
int index = -1;
for (int i = start + _start, n = end + _start; i < n && index == -1; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
if (predicate.apply(element) == Boolean.TRUE) {
index = i - _start;
}
}
return index;
}
@Override
public int lastIndexOf(final Object element) {
return lastIndexOf(element, 0, length());
}
@Override
public int lastIndexOf(final Object element, final int end) {
return lastIndexOf(element, 0, end);
}
@Override
public int lastIndexOf(final Object element, final int start, final int end) {
checkIndex(start, end);
int index = -1;
if (element == null) {
for (int i = end + _start; --i >= start + _start && index == -1;) {
if (_array.data[i] == null) {
index = i - _start;
}
}
} else {
for (int i = end + _start; --i >= start + _start && index == -1;) {
if (element.equals(_array.data[i])) {
index = i - _start;
}
}
}
return index;
}
@Override
public int lastIndexWhere(final Function<? super T, Boolean> predicate) {
return lastIndexWhere(predicate, 0, length());
}
@Override
public int lastIndexWhere(
final Function<? super T, Boolean> predicate,
final int end
) {
return lastIndexWhere(predicate, 0, end);
}
@Override
public int lastIndexWhere(
final Function<? super T, Boolean> predicate,
final int start,
final int end
) {
nonNull(predicate, "Predicate");
checkIndex(start, end);
int index = -1;
for (int i = end + _start; --i >= _start && index == -1;) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
if (predicate.apply(element) == Boolean.TRUE) {
index = i - _start;
}
}
return index;
}
@Override
public <R> void foreach(final Function<? super T, ? extends R> function) {
forEach(function);
}
@Override
public <R> void forEach(final Function<? super T, ? extends R> function) {
nonNull(function, "Function");
for (int i = _start; i < _end; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
function.apply(element);
}
}
@Override
public boolean forall(final Function<? super T, Boolean> predicate) {
return forAll(predicate);
}
@Override
public boolean forAll(final Function<? super T, Boolean> predicate) {
nonNull(predicate, "Predicate");
boolean valid = true;
for (int i = _start; i < _end && valid; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
valid = predicate.apply(element).booleanValue();
}
return valid;
}
/*
<B> B foldLeft(final B z, final Function2<? super B, ? super T, ? extends B> op) {
B result = z;
for (int i = 0, n = length(); i < n; ++i) {
@SuppressWarnings("unchecked")
final T value = (T)_array.data[i + _start];
result = op.apply(result, value);
}
		return result;
}
<B> B foldRight(final B z, final Function2<? super T, ? super B, ? extends B> op) {
B result = z;
for (int i = length(); --i >= 0;) {
@SuppressWarnings("unchecked")
final T value = (T)_array.data[i + _start];
result = op.apply(value, result);
}
		return result;
}
interface Function2<T1, T2, R> {
R apply(T1 t1, T2 t2);
}
*/
@Override
public boolean contains(final Object element) {
return indexOf(element) != -1;
}
@Override
public int length() {
return _length;
}
@Override
public Iterator<T> iterator() {
return new ArraySeqIterator<>(this);
}
@Override
public <B> Iterator<B> iterator(
final Function<? super T, ? extends B> converter
) {
nonNull(converter, "Converter");
return new Iterator<B>() {
private final Iterator<T> _iterator = iterator();
@Override public boolean hasNext() {
return _iterator.hasNext();
}
@Override public B next() {
return converter.apply(_iterator.next());
}
@Override public void remove() {
_iterator.remove();
}
};
}
@Override
public Object[] toArray() {
Object[] array = null;
if (length() == _array.data.length) {
array = _array.data.clone();
} else {
array = new Object[length()];
System.arraycopy(_array.data, _start, array, 0, length());
}
return array;
}
@SuppressWarnings("unchecked")
@Override
public T[] toArray(final T[] array) {
T[] result = null;
if (array.length < length()) {
result = (T[])Arrays.copyOfRange(_array.data, _start, _end, array.getClass());
} else {
System.arraycopy(_array.data, _start, array, 0, length());
if (array.length > length()) {
array[length()] = null;
}
result = array;
}
return result;
}
@Override
public List<T> asList() {
return new ArraySeqList<>(this);
}
final void checkIndex(final int index) {
if (index < 0 || index >= length()) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Index %s is out of bounds [0, %s)", index, length()
));
}
}
final void checkIndex(final int from, final int to) {
if (from > to) {
throw new ArrayIndexOutOfBoundsException(
"fromIndex(" + from + ") > toIndex(" + to+ ")"
);
}
if (from < 0 || to > length()) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Invalid index range: [%d, %s)", from, to
));
}
}
@Override
public int hashCode() {
return arrays.hashCode(this);
}
@Override
public boolean equals(final Object obj) {
return arrays.equals(this, obj);
}
@Override
public String toString(
final String prefix,
final String separator,
final String suffix
) {
final StringBuilder out = new StringBuilder();
out.append(prefix);
if (length() > 0) {
out.append(_array.data[_start]);
}
for (int i = _start + 1; i < _end; ++i) {
out.append(separator);
out.append(_array.data[i]);
}
out.append(suffix);
return out.toString();
}
@Override
public String toString(final String separator) {
return toString("", separator, "");
}
@Override
public String toString() {
return toString("[", ",", "]");
}
private void writeObject(final ObjectOutputStream out)
throws IOException
{
out.defaultWriteObject();
out.writeInt(length());
for (int i = _start; i < _end; ++i) {
out.writeObject(_array.data[i]);
}
}
private void readObject(final ObjectInputStream in)
throws IOException, ClassNotFoundException
{
in.defaultReadObject();
_length = in.readInt();
_array = new ArrayRef(_length);
_start = 0;
_end = _length;
for (int i = 0; i < _length; ++i) {
_array.data[i] = in.readObject();
}
}
}
| org.jenetics/src/main/java/org/jenetics/util/ArraySeq.java | /*
* Java Genetic Algorithm Library (@__identifier__@).
* Copyright (c) @__year__@ Franz Wilhelmstötter
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* Author:
* Franz Wilhelmstötter ([email protected])
*
*/
package org.jenetics.util;
import static org.jenetics.util.object.nonNull;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
* @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
* @since 1.0
* @version 1.0 — <em>$Date: 2013-06-03 $</em>
*/
abstract class ArraySeq<T> implements Seq<T>, Serializable {
private static final long serialVersionUID = 1L;
transient ArrayRef _array;
transient int _start;
transient int _end;
transient int _length;
/**
* <i>Universal</i> array constructor.
*
* @param array the array which holds the elements. The array will not be
* copied.
	 * @param start the start index of the given array (inclusively).
* @param end the end index of the given array (exclusively)
* @throws NullPointerException if the given {@code array} is {@code null}.
	 * @throws IndexOutOfBoundsException for an illegal start/end point index
	 *         value ({@code start < 0 || end > array.length || start > end}).
*/
ArraySeq(final ArrayRef array, final int start, final int end) {
nonNull(array, "Array");
if (start < 0 || end > array.length || start > end) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Invalid index range: [%d, %s)", start, end
));
}
_array = array;
_start = start;
_end = end;
_length = _end - _start;
}
ArraySeq(final int length) {
this(new ArrayRef(length), 0, length);
}
@Override
@SuppressWarnings("unchecked")
public T get(final int index) {
checkIndex(index);
return (T)_array.data[index + _start];
}
@Override
public int indexOf(final Object element) {
return indexOf(element, 0, length());
}
@Override
public int indexOf(final Object element, final int start) {
return indexOf(element, start, length());
}
@Override
public int indexOf(final Object element, final int start, final int end) {
checkIndex(start, end);
final int n = end + _start;
int index = -1;
if (element == null) {
for (int i = start + _start; i < n && index == -1; ++i) {
if (_array.data[i] == null) {
index = i - _start;
}
}
} else {
for (int i = _start + start; i < n && index == -1; ++i) {
if (element.equals(_array.data[i])) {
index = i - _start;
}
}
}
return index;
}
@Override
public int indexWhere(final Function<? super T, Boolean> predicate) {
return indexWhere(predicate, 0, length());
}
@Override
public int indexWhere(
final Function<? super T, Boolean> predicate,
final int start
) {
return indexWhere(predicate, start, length());
}
@Override
public int indexWhere(
final Function<? super T, Boolean> predicate,
final int start,
final int end
) {
nonNull(predicate, "Predicate");
int index = -1;
for (int i = start + _start, n = end + _start; i < n && index == -1; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
if (predicate.apply(element) == Boolean.TRUE) {
index = i - _start;
}
}
return index;
}
@Override
public int lastIndexOf(final Object element) {
return lastIndexOf(element, 0, length());
}
@Override
public int lastIndexOf(final Object element, final int end) {
return lastIndexOf(element, 0, end);
}
@Override
public int lastIndexOf(final Object element, final int start, final int end) {
checkIndex(start, end);
int index = -1;
if (element == null) {
for (int i = end + _start; --i >= start + _start && index == -1;) {
if (_array.data[i] == null) {
index = i - _start;
}
}
} else {
for (int i = end + _start; --i >= start + _start && index == -1;) {
if (element.equals(_array.data[i])) {
index = i - _start;
}
}
}
return index;
}
@Override
public int lastIndexWhere(final Function<? super T, Boolean> predicate) {
return lastIndexWhere(predicate, 0, length());
}
@Override
public int lastIndexWhere(
final Function<? super T, Boolean> predicate,
final int end
) {
return lastIndexWhere(predicate, 0, end);
}
@Override
public int lastIndexWhere(
final Function<? super T, Boolean> predicate,
final int start,
final int end
) {
nonNull(predicate, "Predicate");
checkIndex(start, end);
int index = -1;
for (int i = end + _start; --i >= _start && index == -1;) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
if (predicate.apply(element) == Boolean.TRUE) {
index = i - _start;
}
}
return index;
}
@Override
public <R> void foreach(final Function<? super T, ? extends R> function) {
forEach(function);
}
@Override
public <R> void forEach(final Function<? super T, ? extends R> function) {
nonNull(function, "Function");
for (int i = _start; i < _end; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
function.apply(element);
}
}
@Override
public boolean forall(final Function<? super T, Boolean> predicate) {
return forAll(predicate);
}
@Override
public boolean forAll(final Function<? super T, Boolean> predicate) {
nonNull(predicate, "Predicate");
boolean valid = true;
for (int i = _start; i < _end && valid; ++i) {
@SuppressWarnings("unchecked")
final T element = (T)_array.data[i];
valid = predicate.apply(element).booleanValue();
}
return valid;
}
/*
<B> B foldLeft(final B z, final Function2<? super B, ? super T, ? extends B> op) {
B result = z;
for (int i = 0, n = length(); i < n; ++i) {
@SuppressWarnings("unchecked")
final T value = (T)_array.data[i + _start];
result = op.apply(result, value);
}
		return result;
}
<B> B foldRight(final B z, final Function2<? super T, ? super B, ? extends B> op) {
B result = z;
for (int i = length(); --i >= 0;) {
@SuppressWarnings("unchecked")
final T value = (T)_array.data[i + _start];
result = op.apply(value, result);
}
		return result;
}
interface Function2<T1, T2, R> {
R apply(T1 t1, T2 t2);
}
*/
@Override
public boolean contains(final Object element) {
return indexOf(element) != -1;
}
@Override
public int length() {
return _length;
}
@Override
public Iterator<T> iterator() {
return new ArraySeqIterator<>(this);
}
@Override
public <B> Iterator<B> iterator(
final Function<? super T, ? extends B> converter
) {
nonNull(converter, "Converter");
return new Iterator<B>() {
private final Iterator<T> _iterator = iterator();
@Override public boolean hasNext() {
return _iterator.hasNext();
}
@Override public B next() {
return converter.apply(_iterator.next());
}
@Override public void remove() {
_iterator.remove();
}
};
}
@Override
public Object[] toArray() {
Object[] array = null;
if (length() == _array.data.length) {
array = _array.data.clone();
} else {
array = new Object[length()];
System.arraycopy(_array.data, _start, array, 0, length());
}
return array;
}
@SuppressWarnings("unchecked")
@Override
public T[] toArray(final T[] array) {
T[] result = null;
if (array.length < length()) {
result = (T[])Arrays.copyOfRange(_array.data, _start, _end, array.getClass());
} else {
System.arraycopy(_array.data, _start, array, 0, length());
if (array.length > length()) {
array[length()] = null;
}
result = array;
}
return result;
}
@Override
public List<T> asList() {
return new ArraySeqList<>(this);
}
final void checkIndex(final int index) {
if (index < 0 || index >= length()) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Index %s is out of bounds [0, %s)", index, length()
));
}
}
final void checkIndex(final int from, final int to) {
if (from > to) {
throw new ArrayIndexOutOfBoundsException(
"fromIndex(" + from + ") > toIndex(" + to+ ")"
);
}
if (from < 0 || to > length()) {
throw new ArrayIndexOutOfBoundsException(String.format(
"Invalid index range: [%d, %s)", from, to
));
}
}
@Override
public int hashCode() {
return arrays.hashCode(this);
}
@Override
public boolean equals(final Object obj) {
return arrays.equals(this, obj);
}
@Override
public String toString(
final String prefix,
final String separator,
final String suffix
) {
final StringBuilder out = new StringBuilder();
out.append(prefix);
if (length() > 0) {
out.append(_array.data[_start]);
}
for (int i = _start + 1; i < _end; ++i) {
out.append(separator);
out.append(_array.data[i]);
}
out.append(suffix);
return out.toString();
}
@Override
public String toString(final String separator) {
return toString("", separator, "");
}
@Override
public String toString() {
return toString("[", ",", "]");
}
private void writeObject(final ObjectOutputStream out)
throws IOException
{
out.defaultWriteObject();
out.writeInt(length());
for (int i = _start; i < _end; ++i) {
out.writeObject(_array.data[i]);
}
}
private void readObject(final ObjectInputStream in)
throws IOException, ClassNotFoundException
{
in.defaultReadObject();
_length = in.readInt();
_array = new ArrayRef(_length);
_start = 0;
_end = _length;
for (int i = 0; i < _length; ++i) {
_array.data[i] = in.readObject();
}
}
}
| Mark 'Seq.forall' method as deprecated. Replaced by 'Seq.forAll'.
| org.jenetics/src/main/java/org/jenetics/util/ArraySeq.java | Mark 'Seq.forall' method as deprecated. Replaced by 'Seq.forAll'. | <ide><path>rg.jenetics/src/main/java/org/jenetics/util/ArraySeq.java
<ide> /**
<ide> * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a>
<ide> * @since 1.0
<del> * @version 1.0 — <em>$Date: 2013-06-03 $</em>
<add> * @version 1.3 — <em>$Date: 2013-06-03 $</em>
<ide> */
<ide> abstract class ArraySeq<T> implements Seq<T>, Serializable {
<ide> private static final long serialVersionUID = 1L; |
|
JavaScript | apache-2.0 | 206238893b66bbfd18a16f3e6b708480985edb38 | 0 | apache/cordova-android,apache/cordova-android,apache/cordova-android | /**
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
var fs = require('fs-extra');
var path = require('path');
const nopt = require('nopt');
var events = require('cordova-common').events;
var AndroidManifest = require('./AndroidManifest');
var checkReqs = require('./check_reqs');
var xmlHelpers = require('cordova-common').xmlHelpers;
var CordovaError = require('cordova-common').CordovaError;
var ConfigParser = require('cordova-common').ConfigParser;
var FileUpdater = require('cordova-common').FileUpdater;
var PlatformJson = require('cordova-common').PlatformJson;
var PlatformMunger = require('cordova-common').ConfigChanges.PlatformMunger;
var PluginInfoProvider = require('cordova-common').PluginInfoProvider;
const utils = require('./utils');
const GradlePropertiesParser = require('./config/GradlePropertiesParser');
function parseArguments (argv) {
return nopt({
        // `jvmargs` is a valid option; however, we don't actually want to parse it because we want the entire string as-is.
// jvmargs: String
}, {}, argv || [], 0);
}
module.exports.prepare = function (cordovaProject, options) {
var self = this;
let args = {};
if (options && options.options) {
args = parseArguments(options.options.argv);
}
var platformJson = PlatformJson.load(this.locations.root, this.platform);
var munger = new PlatformMunger(this.platform, this.locations.root, platformJson, new PluginInfoProvider());
this._config = updateConfigFilesFrom(cordovaProject.projectConfig, munger, this.locations);
    // Get the SDK version preferences (min/max/target) from config.xml
const minSdkVersion = this._config.getPreference('android-minSdkVersion', 'android');
const maxSdkVersion = this._config.getPreference('android-maxSdkVersion', 'android');
const targetSdkVersion = this._config.getPreference('android-targetSdkVersion', 'android');
const androidXEnabled = this._config.getPreference('AndroidXEnabled', 'android');
const isGradlePluginKotlinEnabled = this._config.getPreference('GradlePluginKotlinEnabled', 'android');
const gradlePluginKotlinCodeStyle = this._config.getPreference('GradlePluginKotlinCodeStyle', 'android');
const gradlePropertiesUserConfig = {};
if (minSdkVersion) gradlePropertiesUserConfig.cdvMinSdkVersion = minSdkVersion;
if (maxSdkVersion) gradlePropertiesUserConfig.cdvMaxSdkVersion = maxSdkVersion;
if (targetSdkVersion) gradlePropertiesUserConfig.cdvTargetSdkVersion = targetSdkVersion;
if (args.jvmargs) gradlePropertiesUserConfig['org.gradle.jvmargs'] = args.jvmargs;
if (isGradlePluginKotlinEnabled) {
gradlePropertiesUserConfig['kotlin.code.style'] = gradlePluginKotlinCodeStyle || 'official';
}
// Both 'useAndroidX' and 'enableJetifier' are linked together.
if (androidXEnabled) {
gradlePropertiesUserConfig['android.useAndroidX'] = androidXEnabled;
gradlePropertiesUserConfig['android.enableJetifier'] = androidXEnabled;
}
const gradlePropertiesParser = new GradlePropertiesParser(this.locations.root);
gradlePropertiesParser.configure(gradlePropertiesUserConfig);
// Update own www dir with project's www assets and plugins' assets and js-files
return Promise.resolve(updateWww(cordovaProject, this.locations)).then(function () {
// update project according to config.xml changes.
return updateProjectAccordingTo(self._config, self.locations);
}).then(function () {
updateIcons(cordovaProject, path.relative(cordovaProject.root, self.locations.res));
updateSplashes(cordovaProject, path.relative(cordovaProject.root, self.locations.res));
updateFileResources(cordovaProject, path.relative(cordovaProject.root, self.locations.root));
}).then(function () {
events.emit('verbose', 'Prepared android project successfully');
});
};
module.exports.clean = function (options) {
// A cordovaProject isn't passed into the clean() function, because it might have
// been called from the platform shell script rather than the CLI. Check for the
// noPrepare option passed in by the non-CLI clean script. If that's present, or if
// there's no config.xml found at the project root, then don't clean prepared files.
var projectRoot = path.resolve(this.root, '../..');
    if ((options && options.noPrepare) || !fs.existsSync(this.locations.configXml)) {
return Promise.resolve();
}
var projectConfig = new ConfigParser(this.locations.configXml);
var self = this;
return Promise.resolve().then(function () {
cleanWww(projectRoot, self.locations);
cleanIcons(projectRoot, projectConfig, path.relative(projectRoot, self.locations.res));
cleanSplashes(projectRoot, projectConfig, path.relative(projectRoot, self.locations.res));
cleanFileResources(projectRoot, projectConfig, path.relative(projectRoot, self.locations.root));
});
};
/**
* Updates config files in project based on app's config.xml and config munge,
* generated by plugins.
*
* @param {ConfigParser} sourceConfig A project's configuration that will
* be merged into platform's config.xml
* @param {ConfigChanges} configMunger An initialized ConfigChanges instance
* for this platform.
* @param {Object} locations A map of locations for this platform
*
* @return {ConfigParser} An instance of ConfigParser, that
* represents current project's configuration. When returned, the
* configuration is already dumped to appropriate config.xml file.
*/
function updateConfigFilesFrom (sourceConfig, configMunger, locations) {
events.emit('verbose', 'Generating platform-specific config.xml from defaults for android at ' + locations.configXml);
// First cleanup current config and merge project's one into own
// Overwrite platform config.xml with defaults.xml.
fs.copySync(locations.defaultConfigXml, locations.configXml);
// Then apply config changes from global munge to all config files
// in project (including project's config)
configMunger.reapply_global_munge().save_all();
events.emit('verbose', 'Merging project\'s config.xml into platform-specific android config.xml');
// Merge changes from app's config.xml into platform's one
var config = new ConfigParser(locations.configXml);
xmlHelpers.mergeXml(sourceConfig.doc.getroot(),
config.doc.getroot(), 'android', /* clobber= */true);
config.write();
return config;
}
/**
* Logs all file operations via the verbose event stream, indented.
*/
function logFileOp (message) {
events.emit('verbose', ' ' + message);
}
/**
* Updates platform 'www' directory by replacing it with contents of
* 'platform_www' and app www. Also copies project's overrides' folder into
* the platform 'www' folder
*
* @param {Object} cordovaProject An object which describes cordova project.
* @param {Object} destinations An object that contains destination
* paths for www files.
*/
function updateWww (cordovaProject, destinations) {
var sourceDirs = [
path.relative(cordovaProject.root, cordovaProject.locations.www),
path.relative(cordovaProject.root, destinations.platformWww)
];
    // If the project contains 'merges' for our platform, use them as another set of overrides
var merges_path = path.join(cordovaProject.root, 'merges', 'android');
if (fs.existsSync(merges_path)) {
events.emit('verbose', 'Found "merges/android" folder. Copying its contents into the android project.');
sourceDirs.push(path.join('merges', 'android'));
}
var targetDir = path.relative(cordovaProject.root, destinations.www);
events.emit(
'verbose', 'Merging and updating files from [' + sourceDirs.join(', ') + '] to ' + targetDir);
FileUpdater.mergeAndUpdateDir(
sourceDirs, targetDir, { rootDir: cordovaProject.root }, logFileOp);
}
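// Illustrative sketch only; the concrete paths below are assumptions for a default
// project layout, not values read from this file. The merge above layers the sources
// in order, with later entries taking precedence:
//   ['www', 'platforms/android/platform_www', 'merges/android'] -> destinations.www
// so files in merges/android win over platform_www, which wins over the app's www.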
/**
* Cleans all files from the platform 'www' directory.
*/
function cleanWww (projectRoot, locations) {
var targetDir = path.relative(projectRoot, locations.www);
events.emit('verbose', 'Cleaning ' + targetDir);
// No source paths are specified, so mergeAndUpdateDir() will clear the target directory.
FileUpdater.mergeAndUpdateDir(
[], targetDir, { rootDir: projectRoot, all: true }, logFileOp);
}
/**
* Updates project structure and AndroidManifest according to project's configuration.
*
* @param {ConfigParser} platformConfig A project's configuration that will
* be used to update project
* @param {Object} locations A map of locations for this platform
*/
function updateProjectAccordingTo (platformConfig, locations) {
// Update app name by editing res/values/strings.xml
var strings = xmlHelpers.parseElementtreeSync(locations.strings);
var name = platformConfig.name();
strings.find('string[@name="app_name"]').text = name.replace(/'/g, '\\\'');
var shortName = platformConfig.shortName && platformConfig.shortName();
if (shortName && shortName !== name) {
strings.find('string[@name="launcher_name"]').text = shortName.replace(/'/g, '\\\'');
}
fs.writeFileSync(locations.strings, strings.write({ indent: 4 }), 'utf-8');
events.emit('verbose', 'Wrote out android application name "' + name + '" to ' + locations.strings);
// Java packages cannot support dashes
var androidPkgName = (platformConfig.android_packageName() || platformConfig.packageName()).replace(/-/g, '_');
var manifest = new AndroidManifest(locations.manifest);
var manifestId = manifest.getPackageId();
manifest.getActivity()
.setOrientation(platformConfig.getPreference('orientation'))
.setLaunchMode(findAndroidLaunchModePreference(platformConfig));
manifest.setVersionName(platformConfig.version())
.setVersionCode(platformConfig.android_versionCode() || default_versionCode(platformConfig.version()))
.setPackageId(androidPkgName)
.write();
// Java file paths shouldn't be hard coded
const javaDirectory = path.join(locations.javaSrc, manifestId.replace(/\./g, '/'));
const javaPattern = /\.java$/;
const java_files = utils.scanDirectory(javaDirectory, javaPattern, true).filter(function (f) {
return utils.grep(f, /extends\s+CordovaActivity/g) !== null;
});
if (java_files.length === 0) {
throw new CordovaError('No Java files found that extend CordovaActivity.');
} else if (java_files.length > 1) {
events.emit('log', 'Multiple candidate Java files that extend CordovaActivity found. Guessing at the first one, ' + java_files[0]);
}
const destFile = java_files[0];
// var destFile = path.join(locations.root, 'app', 'src', 'main', 'java', androidPkgName.replace(/\./g, '/'), path.basename(java_files[0]));
// fs.ensureDirSync(path.dirname(destFile));
// events.emit('verbose', java_files[0]);
// events.emit('verbose', destFile);
// console.log(locations);
// fs.copySync(java_files[0], destFile);
utils.replaceFileContents(destFile, /package [\w.]*;/, 'package ' + androidPkgName + ';');
events.emit('verbose', 'Wrote out Android package name "' + androidPkgName + '" to ' + destFile);
var removeOrigPkg = checkReqs.isWindows() || checkReqs.isDarwin()
? manifestId.toUpperCase() !== androidPkgName.toUpperCase()
: manifestId !== androidPkgName;
if (removeOrigPkg) {
        // If the package name was changed, we need to remove the old java file containing the main activity
fs.removeSync(java_files[0]);
// remove any empty directories
var currentDir = path.dirname(java_files[0]);
var sourcesRoot = path.resolve(locations.root, 'src');
while (currentDir !== sourcesRoot) {
if (fs.existsSync(currentDir) && fs.readdirSync(currentDir).length === 0) {
fs.rmdirSync(currentDir);
currentDir = path.resolve(currentDir, '..');
} else {
break;
}
}
}
}
// Construct the default value for versionCode as
// PATCH + MINOR * 100 + MAJOR * 10000
// see http://developer.android.com/tools/publishing/versioning.html
function default_versionCode (version) {
var nums = version.split('-')[0].split('.');
var versionCode = 0;
if (+nums[0]) {
versionCode += +nums[0] * 10000;
}
if (+nums[1]) {
versionCode += +nums[1] * 100;
}
if (+nums[2]) {
versionCode += +nums[2];
}
events.emit('verbose', 'android-versionCode not found in config.xml. Generating a code based on version in config.xml (' + version + '): ' + versionCode);
return versionCode;
}
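// Worked example of the formula above (illustrative only):
//   default_versionCode('3.2.1')     -> 3 * 10000 + 2 * 100 + 1 = 30201
//   default_versionCode('1.0.0-dev') -> 10000 (the '-dev' pre-release suffix is split off
//                                      first, and the zero minor/patch parts add nothing)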
function getImageResourcePath (resourcesDir, type, density, name, sourceName) {
if (/\.9\.png$/.test(sourceName)) {
name = name.replace(/\.png$/, '.9.png');
}
var resourcePath = path.join(resourcesDir, (density ? type + '-' + density : type), name);
return resourcePath;
}
function getAdaptiveImageResourcePath (resourcesDir, type, density, name, sourceName) {
if (/\.9\.png$/.test(sourceName)) {
name = name.replace(/\.png$/, '.9.png');
}
var resourcePath = path.join(resourcesDir, (density ? type + '-' + density + '-v26' : type), name);
return resourcePath;
}
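// Hedged examples of the two path helpers above (the 'res' prefix is an assumption):
//   getImageResourcePath('res', 'drawable', 'hdpi', 'screen.png', 'screen.png')
//     -> 'res/drawable-hdpi/screen.png'
//   getAdaptiveImageResourcePath('res', 'mipmap', 'xhdpi', 'ic_launcher_foreground.png', 'fg.png')
//     -> 'res/mipmap-xhdpi-v26/ic_launcher_foreground.png'
// A source name ending in '.9.png' switches the target to the nine-patch name,
// e.g. 'screen.png' becomes 'screen.9.png'.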
function updateSplashes (cordovaProject, platformResourcesDir) {
var resources = cordovaProject.projectConfig.getSplashScreens('android');
// if there are "splash" elements in config.xml
if (resources.length === 0) {
events.emit('verbose', 'This app does not have splash screens defined');
return;
}
var resourceMap = mapImageResources(cordovaProject.root, platformResourcesDir, 'drawable', 'screen.png');
var hadMdpi = false;
resources.forEach(function (resource) {
if (!resource.density) {
return;
}
if (resource.density === 'mdpi') {
hadMdpi = true;
}
var targetPath = getImageResourcePath(
platformResourcesDir, 'drawable', resource.density, 'screen.png', path.basename(resource.src));
resourceMap[targetPath] = resource.src;
});
// There's no "default" drawable, so assume default == mdpi.
if (!hadMdpi && resources.defaultResource) {
var targetPath = getImageResourcePath(
platformResourcesDir, 'drawable', 'mdpi', 'screen.png', path.basename(resources.defaultResource.src));
resourceMap[targetPath] = resources.defaultResource.src;
}
events.emit('verbose', 'Updating splash screens at ' + platformResourcesDir);
FileUpdater.updatePaths(
resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function cleanSplashes (projectRoot, projectConfig, platformResourcesDir) {
var resources = projectConfig.getSplashScreens('android');
if (resources.length > 0) {
var resourceMap = mapImageResources(projectRoot, platformResourcesDir, 'drawable', 'screen.png');
events.emit('verbose', 'Cleaning splash screens at ' + platformResourcesDir);
// No source paths are specified in the map, so updatePaths() will delete the target files.
FileUpdater.updatePaths(
resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
}
function updateIcons (cordovaProject, platformResourcesDir) {
const icons = cordovaProject.projectConfig.getIcons('android');
// Skip if there are no app defined icons in config.xml
if (icons.length === 0) {
events.emit('verbose', 'This app does not have launcher icons defined');
return;
}
    // 1. Loop through the icons to determine if there is an error in the setup.
    // 2. During the initial loop, also set up legacy icon support.
const errorMissingAttributes = [];
const errorLegacyIconNeeded = [];
let hasAdaptive = false;
icons.forEach((icon, key) => {
if (
(icon.background && !icon.foreground) ||
(!icon.background && icon.foreground) ||
(!icon.background && !icon.foreground && !icon.src)
) {
errorMissingAttributes.push(icon.density ? icon.density : 'size=' + (icon.height || icon.width));
}
if (icon.foreground) {
hasAdaptive = true;
if (
!icon.src &&
(
icon.foreground.startsWith('@color') ||
path.extname(path.basename(icon.foreground)) === '.xml'
)
) {
errorLegacyIconNeeded.push(icon.density ? icon.density : 'size=' + (icon.height || icon.width));
} else if (!icon.src) {
icons[key].src = icon.foreground;
}
}
});
const errorMessage = [];
if (errorMissingAttributes.length > 0) {
        errorMessage.push('One of the following attributes is set but missing the other for the density type: ' + errorMissingAttributes.join(', ') + '. Please ensure that all required attributes are defined.');
}
if (errorLegacyIconNeeded.length > 0) {
errorMessage.push('For the following icons with the density of: ' + errorLegacyIconNeeded.join(', ') + ', adaptive foreground with a defined color or vector can not be used as a standard fallback icon for older Android devices. To support older Android environments, please provide a value for the src attribute.');
}
if (errorMessage.length > 0) {
throw new CordovaError(errorMessage.join(' '));
}
let resourceMap = Object.assign(
{},
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_background.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.xml'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_background.xml'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher.xml')
);
const preparedIcons = prepareIcons(icons);
if (hasAdaptive) {
resourceMap = updateIconResourceForAdaptive(preparedIcons, resourceMap, platformResourcesDir);
}
resourceMap = updateIconResourceForLegacy(preparedIcons, resourceMap, platformResourcesDir);
events.emit('verbose', 'Updating icons at ' + platformResourcesDir);
FileUpdater.updatePaths(resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function updateIconResourceForAdaptive (preparedIcons, resourceMap, platformResourcesDir) {
const android_icons = preparedIcons.android_icons;
const default_icon = preparedIcons.default_icon;
// The source paths for icons and splashes are relative to
// project's config.xml location, so we use it as base path.
let background;
let foreground;
let targetPathBackground;
let targetPathForeground;
for (const density in android_icons) {
let backgroundVal = '@mipmap/ic_launcher_background';
let foregroundVal = '@mipmap/ic_launcher_foreground';
background = android_icons[density].background;
foreground = android_icons[density].foreground;
if (!background || !foreground) {
// This icon isn't an adaptive icon, so skip it
continue;
}
if (background.startsWith('@color')) {
// Colors Use Case
backgroundVal = background; // Example: @color/background_foobar_1
} else if (path.extname(path.basename(background)) === '.xml') {
// Vector Use Case
targetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_background.xml', path.basename(android_icons[density].background));
resourceMap[targetPathBackground] = android_icons[density].background;
} else if (path.extname(path.basename(background)) === '.png') {
// Images Use Case
targetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_background.png', path.basename(android_icons[density].background));
resourceMap[targetPathBackground] = android_icons[density].background;
}
if (foreground.startsWith('@color')) {
// Colors Use Case
foregroundVal = foreground;
} else if (path.extname(path.basename(foreground)) === '.xml') {
// Vector Use Case
targetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_foreground.xml', path.basename(android_icons[density].foreground));
resourceMap[targetPathForeground] = android_icons[density].foreground;
} else if (path.extname(path.basename(foreground)) === '.png') {
// Images Use Case
targetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_foreground.png', path.basename(android_icons[density].foreground));
resourceMap[targetPathForeground] = android_icons[density].foreground;
}
// create an XML for DPI and set color
const icLauncherTemplate = `<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="` + backgroundVal + `" />
<foreground android:drawable="` + foregroundVal + `" />
</adaptive-icon>`;
const launcherXmlPath = path.join(platformResourcesDir, 'mipmap-' + density + '-v26', 'ic_launcher.xml');
// Remove the XML from the resourceMap so the file does not get removed.
delete resourceMap[launcherXmlPath];
fs.writeFileSync(path.resolve(launcherXmlPath), icLauncherTemplate);
}
// There's no "default" drawable, so assume default == mdpi.
if (default_icon && !android_icons.mdpi) {
let defaultTargetPathBackground;
let defaultTargetPathForeground;
if (background.startsWith('@color')) {
// Colors Use Case
targetPathBackground = default_icon.background;
} else if (path.extname(path.basename(background)) === '.xml') {
// Vector Use Case
defaultTargetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_background.xml', path.basename(default_icon.background));
resourceMap[defaultTargetPathBackground] = default_icon.background;
} else if (path.extname(path.basename(background)) === '.png') {
// Images Use Case
defaultTargetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_background.png', path.basename(default_icon.background));
resourceMap[defaultTargetPathBackground] = default_icon.background;
}
if (foreground.startsWith('@color')) {
// Colors Use Case
targetPathForeground = default_icon.foreground;
} else if (path.extname(path.basename(foreground)) === '.xml') {
// Vector Use Case
defaultTargetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_foreground.xml', path.basename(default_icon.foreground));
resourceMap[defaultTargetPathForeground] = default_icon.foreground;
} else if (path.extname(path.basename(foreground)) === '.png') {
// Images Use Case
defaultTargetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_foreground.png', path.basename(default_icon.foreground));
resourceMap[defaultTargetPathForeground] = default_icon.foreground;
}
}
return resourceMap;
}
function updateIconResourceForLegacy (preparedIcons, resourceMap, platformResourcesDir) {
const android_icons = preparedIcons.android_icons;
const default_icon = preparedIcons.default_icon;
// The source paths for icons and splashes are relative to
// project's config.xml location, so we use it as base path.
for (var density in android_icons) {
var targetPath = getImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher.png', path.basename(android_icons[density].src));
resourceMap[targetPath] = android_icons[density].src;
}
// There's no "default" drawable, so assume default == mdpi.
if (default_icon && !android_icons.mdpi) {
var defaultTargetPath = getImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher.png', path.basename(default_icon.src));
resourceMap[defaultTargetPath] = default_icon.src;
}
return resourceMap;
}
function prepareIcons (icons) {
// http://developer.android.com/design/style/iconography.html
const SIZE_TO_DENSITY_MAP = {
36: 'ldpi',
48: 'mdpi',
72: 'hdpi',
96: 'xhdpi',
144: 'xxhdpi',
192: 'xxxhdpi'
};
const android_icons = {};
let default_icon;
// find the best matching icon for a given density or size
// @output android_icons
var parseIcon = function (icon, icon_size) {
// do I have a platform icon for that density already
var density = icon.density || SIZE_TO_DENSITY_MAP[icon_size];
if (!density) {
            // invalid icon definition (or unsupported size)
return;
}
var previous = android_icons[density];
if (previous && previous.platform) {
return;
}
android_icons[density] = icon;
};
// iterate over all icon elements to find the default icon and call parseIcon
for (var i = 0; i < icons.length; i++) {
var icon = icons[i];
var size = icon.width;
if (!size) {
size = icon.height;
}
if (!size && !icon.density) {
if (default_icon) {
const found = {};
const favor = {};
// populating found icon.
if (icon.background && icon.foreground) {
found.background = icon.background;
found.foreground = icon.foreground;
}
if (icon.src) {
found.src = icon.src;
}
if (default_icon.background && default_icon.foreground) {
favor.background = default_icon.background;
favor.foreground = default_icon.foreground;
}
if (default_icon.src) {
favor.src = default_icon.src;
}
events.emit('verbose', 'Found extra default icon: ' + JSON.stringify(found) + ' and ignoring in favor of ' + JSON.stringify(favor) + '.');
} else {
default_icon = icon;
}
} else {
parseIcon(icon, size);
}
}
return {
android_icons: android_icons,
default_icon: default_icon
};
}
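// Minimal sketch of the grouping performed above; the icon objects are trimmed-down
// assumptions (real entries from ConfigParser carry more fields):
//   prepareIcons([{ src: 'res/icon-48.png', width: 48 }, { src: 'res/icon.png' }])
//     -> { android_icons: { mdpi: { src: 'res/icon-48.png', width: 48 } },
//          default_icon: { src: 'res/icon.png' } }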
function cleanIcons (projectRoot, projectConfig, platformResourcesDir) {
var icons = projectConfig.getIcons('android');
// Skip if there are no app defined icons in config.xml
if (icons.length === 0) {
events.emit('verbose', 'This app does not have launcher icons defined');
return;
}
const resourceMap = Object.assign(
{},
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_background.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.xml'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_background.xml'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher.xml')
);
events.emit('verbose', 'Cleaning icons at ' + platformResourcesDir);
// No source paths are specified in the map, so updatePaths() will delete the target files.
FileUpdater.updatePaths(resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
/**
* Gets a map containing resources of a specified name from all drawable folders in a directory.
*/
function mapImageResources (rootDir, subDir, type, resourceName) {
const pathMap = {};
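// e.g. for type 'mipmap' this builds /mipmap-.+/, matching density-qualified folders such as 'mipmap-hdpi'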
const pattern = new RegExp(type + '-.+');
utils.scanDirectory(path.join(rootDir, subDir), pattern).forEach(function (drawableFolder) {
const imagePath = path.join(subDir, path.basename(drawableFolder), resourceName);
pathMap[imagePath] = null;
});
return pathMap;
}
function updateFileResources (cordovaProject, platformDir) {
var files = cordovaProject.projectConfig.getFileResources('android');
// if there are resource-file elements in config.xml
if (files.length === 0) {
events.emit('verbose', 'This app does not have additional resource files defined');
return;
}
var resourceMap = {};
files.forEach(function (res) {
var targetPath = path.join(platformDir, res.target);
resourceMap[targetPath] = res.src;
});
events.emit('verbose', 'Updating resource files at ' + platformDir);
FileUpdater.updatePaths(
resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function cleanFileResources (projectRoot, projectConfig, platformDir) {
var files = projectConfig.getFileResources('android', true);
if (files.length > 0) {
events.emit('verbose', 'Cleaning resource files at ' + platformDir);
var resourceMap = {};
files.forEach(function (res) {
var filePath = path.join(platformDir, res.target);
resourceMap[filePath] = null;
});
FileUpdater.updatePaths(
resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
}
/**
* Gets and validates 'AndroidLaunchMode' preference from config.xml. Returns
* preference value and warns if it doesn't seem to be valid
*
* @param {ConfigParser} platformConfig A configParser instance for
* platform.
*
* @return {String} Preference's value from config.xml or
* default value, if there is no such preference. The default value is
* 'singleTop'
*/
function findAndroidLaunchModePreference (platformConfig) {
var launchMode = platformConfig.getPreference('AndroidLaunchMode');
if (!launchMode) {
// Return a default value
return 'singleTop';
}
var expectedValues = ['standard', 'singleTop', 'singleTask', 'singleInstance'];
var valid = expectedValues.indexOf(launchMode) >= 0;
if (!valid) {
// Note: warn, but leave the launch mode as developer wanted, in case the list of options changes in the future
events.emit('warn', 'Unrecognized value for AndroidLaunchMode preference: ' +
launchMode + '. Expected values are: ' + expectedValues.join(', '));
}
return launchMode;
}
| bin/templates/cordova/lib/prepare.js | /**
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
var fs = require('fs-extra');
var path = require('path');
const nopt = require('nopt');
var events = require('cordova-common').events;
var AndroidManifest = require('./AndroidManifest');
var checkReqs = require('./check_reqs');
var xmlHelpers = require('cordova-common').xmlHelpers;
var CordovaError = require('cordova-common').CordovaError;
var ConfigParser = require('cordova-common').ConfigParser;
var FileUpdater = require('cordova-common').FileUpdater;
var PlatformJson = require('cordova-common').PlatformJson;
var PlatformMunger = require('cordova-common').ConfigChanges.PlatformMunger;
var PluginInfoProvider = require('cordova-common').PluginInfoProvider;
const utils = require('./utils');
const GradlePropertiesParser = require('./config/GradlePropertiesParser');
function parseArguments (argv) {
return nopt({
// `jvmargs` is a valid option; however, we don't actually want to parse it because we want the entire string as is.
// jvmargs: String
}, {}, argv || [], 0);
}
module.exports.prepare = function (cordovaProject, options) {
var self = this;
let args = {};
if (options && options.options) {
args = parseArguments(options.options.argv);
}
var platformJson = PlatformJson.load(this.locations.root, this.platform);
var munger = new PlatformMunger(this.platform, this.locations.root, platformJson, new PluginInfoProvider());
this._config = updateConfigFilesFrom(cordovaProject.projectConfig, munger, this.locations);
// Get the min SDK version from config.xml
const minSdkVersion = this._config.getPreference('android-minSdkVersion', 'android');
const maxSdkVersion = this._config.getPreference('android-maxSdkVersion', 'android');
const targetSdkVersion = this._config.getPreference('android-targetSdkVersion', 'android');
const androidXEnabled = this._config.getPreference('AndroidXEnabled', 'android');
const isGradlePluginKotlinEnabled = this._config.getPreference('GradlePluginKotlinEnabled', 'android');
const gradlePluginKotlinCodeStyle = this._config.getPreference('GradlePluginKotlinCodeStyle', 'android');
const gradlePropertiesUserConfig = {};
if (minSdkVersion) gradlePropertiesUserConfig.cdvMinSdkVersion = minSdkVersion;
if (maxSdkVersion) gradlePropertiesUserConfig.cdvMaxSdkVersion = maxSdkVersion;
if (targetSdkVersion) gradlePropertiesUserConfig.cdvTargetSdkVersion = targetSdkVersion;
if (args.jvmargs) gradlePropertiesUserConfig['org.gradle.jvmargs'] = args.jvmargs;
if (isGradlePluginKotlinEnabled) {
gradlePropertiesUserConfig['kotlin.code.style'] = gradlePluginKotlinCodeStyle || 'official';
}
// Both 'useAndroidX' and 'enableJetifier' are linked together.
if (androidXEnabled) {
gradlePropertiesUserConfig['android.useAndroidX'] = androidXEnabled;
gradlePropertiesUserConfig['android.enableJetifier'] = androidXEnabled;
}
const gradlePropertiesParser = new GradlePropertiesParser(this.locations.root);
gradlePropertiesParser.configure(gradlePropertiesUserConfig);
// Update own www dir with project's www assets and plugins' assets and js-files
return Promise.resolve(updateWww(cordovaProject, this.locations)).then(function () {
// update project according to config.xml changes.
return updateProjectAccordingTo(self._config, self.locations);
}).then(function () {
updateIcons(cordovaProject, path.relative(cordovaProject.root, self.locations.res));
updateSplashes(cordovaProject, path.relative(cordovaProject.root, self.locations.res));
updateFileResources(cordovaProject, path.relative(cordovaProject.root, self.locations.root));
}).then(function () {
events.emit('verbose', 'Prepared android project successfully');
});
};
module.exports.clean = function (options) {
// A cordovaProject isn't passed into the clean() function, because it might have
// been called from the platform shell script rather than the CLI. Check for the
// noPrepare option passed in by the non-CLI clean script. If that's present, or if
// there's no config.xml found at the project root, then don't clean prepared files.
var projectRoot = path.resolve(this.root, '../..');
if ((options && options.noPrepare) || !fs.existsSync(this.locations.configXml) ||
!fs.existsSync(path.join(projectRoot, 'config.xml'))) {
return Promise.resolve();
}
var projectConfig = new ConfigParser(this.locations.configXml);
var self = this;
return Promise.resolve().then(function () {
cleanWww(projectRoot, self.locations);
cleanIcons(projectRoot, projectConfig, path.relative(projectRoot, self.locations.res));
cleanSplashes(projectRoot, projectConfig, path.relative(projectRoot, self.locations.res));
cleanFileResources(projectRoot, projectConfig, path.relative(projectRoot, self.locations.root));
});
};
/**
* Updates config files in project based on app's config.xml and config munge,
* generated by plugins.
*
* @param {ConfigParser} sourceConfig A project's configuration that will
* be merged into platform's config.xml
* @param {ConfigChanges} configMunger An initialized ConfigChanges instance
* for this platform.
* @param {Object} locations A map of locations for this platform
*
* @return {ConfigParser} An instance of ConfigParser, that
* represents current project's configuration. When returned, the
* configuration is already dumped to appropriate config.xml file.
*/
function updateConfigFilesFrom (sourceConfig, configMunger, locations) {
events.emit('verbose', 'Generating platform-specific config.xml from defaults for android at ' + locations.configXml);
// First cleanup current config and merge project's one into own
// Overwrite platform config.xml with defaults.xml.
fs.copySync(locations.defaultConfigXml, locations.configXml);
// Then apply config changes from global munge to all config files
// in project (including project's config)
configMunger.reapply_global_munge().save_all();
events.emit('verbose', 'Merging project\'s config.xml into platform-specific android config.xml');
// Merge changes from app's config.xml into platform's one
var config = new ConfigParser(locations.configXml);
xmlHelpers.mergeXml(sourceConfig.doc.getroot(),
config.doc.getroot(), 'android', /* clobber= */true);
config.write();
return config;
}
/**
* Logs all file operations via the verbose event stream, indented.
*/
function logFileOp (message) {
events.emit('verbose', ' ' + message);
}
/**
* Updates platform 'www' directory by replacing it with contents of
* 'platform_www' and app www. Also copies project's overrides' folder into
* the platform 'www' folder
*
* @param {Object} cordovaProject An object which describes cordova project.
* @param {Object} destinations An object that contains destination
* paths for www files.
*/
function updateWww (cordovaProject, destinations) {
var sourceDirs = [
path.relative(cordovaProject.root, cordovaProject.locations.www),
path.relative(cordovaProject.root, destinations.platformWww)
];
// If the project contains 'merges' for our platform, use them as another set of overrides
var merges_path = path.join(cordovaProject.root, 'merges', 'android');
if (fs.existsSync(merges_path)) {
events.emit('verbose', 'Found "merges/android" folder. Copying its contents into the android project.');
sourceDirs.push(path.join('merges', 'android'));
}
var targetDir = path.relative(cordovaProject.root, destinations.www);
events.emit(
'verbose', 'Merging and updating files from [' + sourceDirs.join(', ') + '] to ' + targetDir);
FileUpdater.mergeAndUpdateDir(
sourceDirs, targetDir, { rootDir: cordovaProject.root }, logFileOp);
}
/**
* Cleans all files from the platform 'www' directory.
*/
function cleanWww (projectRoot, locations) {
var targetDir = path.relative(projectRoot, locations.www);
events.emit('verbose', 'Cleaning ' + targetDir);
// No source paths are specified, so mergeAndUpdateDir() will clear the target directory.
FileUpdater.mergeAndUpdateDir(
[], targetDir, { rootDir: projectRoot, all: true }, logFileOp);
}
/**
* Updates project structure and AndroidManifest according to project's configuration.
*
* @param {ConfigParser} platformConfig A project's configuration that will
* be used to update project
* @param {Object} locations A map of locations for this platform
*/
function updateProjectAccordingTo (platformConfig, locations) {
// Update app name by editing res/values/strings.xml
var strings = xmlHelpers.parseElementtreeSync(locations.strings);
var name = platformConfig.name();
strings.find('string[@name="app_name"]').text = name.replace(/'/g, '\\\'');
var shortName = platformConfig.shortName && platformConfig.shortName();
if (shortName && shortName !== name) {
strings.find('string[@name="launcher_name"]').text = shortName.replace(/'/g, '\\\'');
}
fs.writeFileSync(locations.strings, strings.write({ indent: 4 }), 'utf-8');
events.emit('verbose', 'Wrote out android application name "' + name + '" to ' + locations.strings);
// Java packages cannot support dashes
var androidPkgName = (platformConfig.android_packageName() || platformConfig.packageName()).replace(/-/g, '_');
var manifest = new AndroidManifest(locations.manifest);
var manifestId = manifest.getPackageId();
manifest.getActivity()
.setOrientation(platformConfig.getPreference('orientation'))
.setLaunchMode(findAndroidLaunchModePreference(platformConfig));
manifest.setVersionName(platformConfig.version())
.setVersionCode(platformConfig.android_versionCode() || default_versionCode(platformConfig.version()))
.setPackageId(androidPkgName)
.write();
// Java file paths shouldn't be hard coded
const javaDirectory = path.join(locations.javaSrc, manifestId.replace(/\./g, '/'));
const javaPattern = /\.java$/;
const java_files = utils.scanDirectory(javaDirectory, javaPattern, true).filter(function (f) {
return utils.grep(f, /extends\s+CordovaActivity/g) !== null;
});
if (java_files.length === 0) {
throw new CordovaError('No Java files found that extend CordovaActivity.');
} else if (java_files.length > 1) {
events.emit('log', 'Multiple candidate Java files that extend CordovaActivity found. Guessing at the first one, ' + java_files[0]);
}
const destFile = java_files[0];
// var destFile = path.join(locations.root, 'app', 'src', 'main', 'java', androidPkgName.replace(/\./g, '/'), path.basename(java_files[0]));
// fs.ensureDirSync(path.dirname(destFile));
// events.emit('verbose', java_files[0]);
// events.emit('verbose', destFile);
// console.log(locations);
// fs.copySync(java_files[0], destFile);
utils.replaceFileContents(destFile, /package [\w.]*;/, 'package ' + androidPkgName + ';');
events.emit('verbose', 'Wrote out Android package name "' + androidPkgName + '" to ' + destFile);
var removeOrigPkg = checkReqs.isWindows() || checkReqs.isDarwin()
? manifestId.toUpperCase() !== androidPkgName.toUpperCase()
: manifestId !== androidPkgName;
if (removeOrigPkg) {
// If the package name was changed, we need to remove the old java file containing the main activity
fs.removeSync(java_files[0]);
// remove any empty directories
var currentDir = path.dirname(java_files[0]);
var sourcesRoot = path.resolve(locations.root, 'src');
while (currentDir !== sourcesRoot) {
if (fs.existsSync(currentDir) && fs.readdirSync(currentDir).length === 0) {
fs.rmdirSync(currentDir);
currentDir = path.resolve(currentDir, '..');
} else {
break;
}
}
}
}
// Construct the default value for versionCode as
// PATCH + MINOR * 100 + MAJOR * 10000
// see http://developer.android.com/tools/publishing/versioning.html
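// e.g. version '3.7.10' produces 3 * 10000 + 7 * 100 + 10 = 30710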
function default_versionCode (version) {
var nums = version.split('-')[0].split('.');
var versionCode = 0;
if (+nums[0]) {
versionCode += +nums[0] * 10000;
}
if (+nums[1]) {
versionCode += +nums[1] * 100;
}
if (+nums[2]) {
versionCode += +nums[2];
}
events.emit('verbose', 'android-versionCode not found in config.xml. Generating a code based on version in config.xml (' + version + '): ' + versionCode);
return versionCode;
}
function getImageResourcePath (resourcesDir, type, density, name, sourceName) {
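// Keep the nine-patch suffix on the target name when the source image is a .9.png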
if (/\.9\.png$/.test(sourceName)) {
name = name.replace(/\.png$/, '.9.png');
}
var resourcePath = path.join(resourcesDir, (density ? type + '-' + density : type), name);
return resourcePath;
}
function getAdaptiveImageResourcePath (resourcesDir, type, density, name, sourceName) {
if (/\.9\.png$/.test(sourceName)) {
name = name.replace(/\.png$/, '.9.png');
}
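// Adaptive icon resources use the -v26 qualifier (e.g. mipmap-hdpi-v26) because adaptive icons require API level 26+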
var resourcePath = path.join(resourcesDir, (density ? type + '-' + density + '-v26' : type), name);
return resourcePath;
}
function updateSplashes (cordovaProject, platformResourcesDir) {
var resources = cordovaProject.projectConfig.getSplashScreens('android');
// if there are "splash" elements in config.xml
if (resources.length === 0) {
events.emit('verbose', 'This app does not have splash screens defined');
return;
}
var resourceMap = mapImageResources(cordovaProject.root, platformResourcesDir, 'drawable', 'screen.png');
var hadMdpi = false;
resources.forEach(function (resource) {
if (!resource.density) {
return;
}
if (resource.density === 'mdpi') {
hadMdpi = true;
}
var targetPath = getImageResourcePath(
platformResourcesDir, 'drawable', resource.density, 'screen.png', path.basename(resource.src));
resourceMap[targetPath] = resource.src;
});
// There's no "default" drawable, so assume default == mdpi.
if (!hadMdpi && resources.defaultResource) {
var targetPath = getImageResourcePath(
platformResourcesDir, 'drawable', 'mdpi', 'screen.png', path.basename(resources.defaultResource.src));
resourceMap[targetPath] = resources.defaultResource.src;
}
events.emit('verbose', 'Updating splash screens at ' + platformResourcesDir);
FileUpdater.updatePaths(
resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function cleanSplashes (projectRoot, projectConfig, platformResourcesDir) {
var resources = projectConfig.getSplashScreens('android');
if (resources.length > 0) {
var resourceMap = mapImageResources(projectRoot, platformResourcesDir, 'drawable', 'screen.png');
events.emit('verbose', 'Cleaning splash screens at ' + platformResourcesDir);
// No source paths are specified in the map, so updatePaths() will delete the target files.
FileUpdater.updatePaths(
resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
}
function updateIcons (cordovaProject, platformResourcesDir) {
const icons = cordovaProject.projectConfig.getIcons('android');
// Skip if there are no app defined icons in config.xml
if (icons.length === 0) {
events.emit('verbose', 'This app does not have launcher icons defined');
return;
}
// 1. Loop over the icons to determine if there is an error in the setup.
// 2. During the initial loop, also set up legacy support.
const errorMissingAttributes = [];
const errorLegacyIconNeeded = [];
let hasAdaptive = false;
icons.forEach((icon, key) => {
if (
(icon.background && !icon.foreground) ||
(!icon.background && icon.foreground) ||
(!icon.background && !icon.foreground && !icon.src)
) {
errorMissingAttributes.push(icon.density ? icon.density : 'size=' + (icon.height || icon.width));
}
if (icon.foreground) {
hasAdaptive = true;
if (
!icon.src &&
(
icon.foreground.startsWith('@color') ||
path.extname(path.basename(icon.foreground)) === '.xml'
)
) {
errorLegacyIconNeeded.push(icon.density ? icon.density : 'size=' + (icon.height || icon.width));
} else if (!icon.src) {
icons[key].src = icon.foreground;
}
}
});
const errorMessage = [];
if (errorMissingAttributes.length > 0) {
errorMessage.push('One of the following attributes is set but missing the other for the density type: ' + errorMissingAttributes.join(', ') + '. Please ensure that all required attributes are defined.');
}
if (errorLegacyIconNeeded.length > 0) {
errorMessage.push('For the following icons with the density of: ' + errorLegacyIconNeeded.join(', ') + ', adaptive foreground with a defined color or vector can not be used as a standard fallback icon for older Android devices. To support older Android environments, please provide a value for the src attribute.');
}
if (errorMessage.length > 0) {
throw new CordovaError(errorMessage.join(' '));
}
let resourceMap = Object.assign(
{},
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_background.png'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.xml'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher_background.xml'),
mapImageResources(cordovaProject.root, platformResourcesDir, 'mipmap', 'ic_launcher.xml')
);
const preparedIcons = prepareIcons(icons);
if (hasAdaptive) {
resourceMap = updateIconResourceForAdaptive(preparedIcons, resourceMap, platformResourcesDir);
}
resourceMap = updateIconResourceForLegacy(preparedIcons, resourceMap, platformResourcesDir);
events.emit('verbose', 'Updating icons at ' + platformResourcesDir);
FileUpdater.updatePaths(resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function updateIconResourceForAdaptive (preparedIcons, resourceMap, platformResourcesDir) {
const android_icons = preparedIcons.android_icons;
const default_icon = preparedIcons.default_icon;
// The source paths for icons and splashes are relative to
// project's config.xml location, so we use it as base path.
let background;
let foreground;
let targetPathBackground;
let targetPathForeground;
for (const density in android_icons) {
let backgroundVal = '@mipmap/ic_launcher_background';
let foregroundVal = '@mipmap/ic_launcher_foreground';
background = android_icons[density].background;
foreground = android_icons[density].foreground;
if (!background || !foreground) {
// This icon isn't an adaptive icon, so skip it
continue;
}
if (background.startsWith('@color')) {
// Colors Use Case
backgroundVal = background; // Example: @color/background_foobar_1
} else if (path.extname(path.basename(background)) === '.xml') {
// Vector Use Case
targetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_background.xml', path.basename(android_icons[density].background));
resourceMap[targetPathBackground] = android_icons[density].background;
} else if (path.extname(path.basename(background)) === '.png') {
// Images Use Case
targetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_background.png', path.basename(android_icons[density].background));
resourceMap[targetPathBackground] = android_icons[density].background;
}
if (foreground.startsWith('@color')) {
// Colors Use Case
foregroundVal = foreground;
} else if (path.extname(path.basename(foreground)) === '.xml') {
// Vector Use Case
targetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_foreground.xml', path.basename(android_icons[density].foreground));
resourceMap[targetPathForeground] = android_icons[density].foreground;
} else if (path.extname(path.basename(foreground)) === '.png') {
// Images Use Case
targetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher_foreground.png', path.basename(android_icons[density].foreground));
resourceMap[targetPathForeground] = android_icons[density].foreground;
}
// create an XML for DPI and set color
const icLauncherTemplate = `<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="` + backgroundVal + `" />
<foreground android:drawable="` + foregroundVal + `" />
</adaptive-icon>`;
const launcherXmlPath = path.join(platformResourcesDir, 'mipmap-' + density + '-v26', 'ic_launcher.xml');
// Remove the XML from the resourceMap so the file does not get removed.
delete resourceMap[launcherXmlPath];
fs.writeFileSync(path.resolve(launcherXmlPath), icLauncherTemplate);
}
// There's no "default" drawable, so assume default == mdpi.
if (default_icon && !android_icons.mdpi) {
let defaultTargetPathBackground;
let defaultTargetPathForeground;
if (background.startsWith('@color')) {
// Colors Use Case
targetPathBackground = default_icon.background;
} else if (path.extname(path.basename(background)) === '.xml') {
// Vector Use Case
defaultTargetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_background.xml', path.basename(default_icon.background));
resourceMap[defaultTargetPathBackground] = default_icon.background;
} else if (path.extname(path.basename(background)) === '.png') {
// Images Use Case
defaultTargetPathBackground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_background.png', path.basename(default_icon.background));
resourceMap[defaultTargetPathBackground] = default_icon.background;
}
if (foreground.startsWith('@color')) {
// Colors Use Case
targetPathForeground = default_icon.foreground;
} else if (path.extname(path.basename(foreground)) === '.xml') {
// Vector Use Case
defaultTargetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_foreground.xml', path.basename(default_icon.foreground));
resourceMap[defaultTargetPathForeground] = default_icon.foreground;
} else if (path.extname(path.basename(foreground)) === '.png') {
// Images Use Case
defaultTargetPathForeground = getAdaptiveImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher_foreground.png', path.basename(default_icon.foreground));
resourceMap[defaultTargetPathForeground] = default_icon.foreground;
}
}
return resourceMap;
}
function updateIconResourceForLegacy (preparedIcons, resourceMap, platformResourcesDir) {
const android_icons = preparedIcons.android_icons;
const default_icon = preparedIcons.default_icon;
// The source paths for icons and splashes are relative to
// project's config.xml location, so we use it as base path.
for (var density in android_icons) {
var targetPath = getImageResourcePath(platformResourcesDir, 'mipmap', density, 'ic_launcher.png', path.basename(android_icons[density].src));
resourceMap[targetPath] = android_icons[density].src;
}
// There's no "default" drawable, so assume default == mdpi.
if (default_icon && !android_icons.mdpi) {
var defaultTargetPath = getImageResourcePath(platformResourcesDir, 'mipmap', 'mdpi', 'ic_launcher.png', path.basename(default_icon.src));
resourceMap[defaultTargetPath] = default_icon.src;
}
return resourceMap;
}
function prepareIcons (icons) {
// http://developer.android.com/design/style/iconography.html
const SIZE_TO_DENSITY_MAP = {
36: 'ldpi',
48: 'mdpi',
72: 'hdpi',
96: 'xhdpi',
144: 'xxhdpi',
192: 'xxxhdpi'
};
const android_icons = {};
let default_icon;
// find the best matching icon for a given density or size
// @output android_icons
var parseIcon = function (icon, icon_size) {
// do I have a platform icon for that density already
var density = icon.density || SIZE_TO_DENSITY_MAP[icon_size];
if (!density) {
// invalid icon definition (or unsupported size)
return;
}
var previous = android_icons[density];
if (previous && previous.platform) {
return;
}
android_icons[density] = icon;
};
// iterate over all icon elements to find the default icon and call parseIcon
for (var i = 0; i < icons.length; i++) {
var icon = icons[i];
var size = icon.width;
if (!size) {
size = icon.height;
}
if (!size && !icon.density) {
if (default_icon) {
const found = {};
const favor = {};
// populating found icon.
if (icon.background && icon.foreground) {
found.background = icon.background;
found.foreground = icon.foreground;
}
if (icon.src) {
found.src = icon.src;
}
if (default_icon.background && default_icon.foreground) {
favor.background = default_icon.background;
favor.foreground = default_icon.foreground;
}
if (default_icon.src) {
favor.src = default_icon.src;
}
events.emit('verbose', 'Found extra default icon: ' + JSON.stringify(found) + ' and ignoring in favor of ' + JSON.stringify(favor) + '.');
} else {
default_icon = icon;
}
} else {
parseIcon(icon, size);
}
}
return {
android_icons: android_icons,
default_icon: default_icon
};
}
function cleanIcons (projectRoot, projectConfig, platformResourcesDir) {
var icons = projectConfig.getIcons('android');
// Skip if there are no app defined icons in config.xml
if (icons.length === 0) {
events.emit('verbose', 'This app does not have launcher icons defined');
return;
}
const resourceMap = Object.assign(
{},
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_background.png'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_foreground.xml'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher_background.xml'),
mapImageResources(projectRoot, platformResourcesDir, 'mipmap', 'ic_launcher.xml')
);
events.emit('verbose', 'Cleaning icons at ' + platformResourcesDir);
// No source paths are specified in the map, so updatePaths() will delete the target files.
FileUpdater.updatePaths(resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
/**
* Gets a map containing resources of a specified name from all drawable folders in a directory.
*/
function mapImageResources (rootDir, subDir, type, resourceName) {
const pathMap = {};
const pattern = new RegExp(type + '+-.+');
utils.scanDirectory(path.join(rootDir, subDir), pattern).forEach(function (drawableFolder) {
const imagePath = path.join(subDir, path.basename(drawableFolder), resourceName);
pathMap[imagePath] = null;
});
return pathMap;
}
function updateFileResources (cordovaProject, platformDir) {
var files = cordovaProject.projectConfig.getFileResources('android');
// if there are resource-file elements in config.xml
if (files.length === 0) {
events.emit('verbose', 'This app does not have additional resource files defined');
return;
}
var resourceMap = {};
files.forEach(function (res) {
var targetPath = path.join(platformDir, res.target);
resourceMap[targetPath] = res.src;
});
events.emit('verbose', 'Updating resource files at ' + platformDir);
FileUpdater.updatePaths(
resourceMap, { rootDir: cordovaProject.root }, logFileOp);
}
function cleanFileResources (projectRoot, projectConfig, platformDir) {
var files = projectConfig.getFileResources('android', true);
if (files.length > 0) {
events.emit('verbose', 'Cleaning resource files at ' + platformDir);
var resourceMap = {};
files.forEach(function (res) {
var filePath = path.join(platformDir, res.target);
resourceMap[filePath] = null;
});
FileUpdater.updatePaths(
resourceMap, { rootDir: projectRoot, all: true }, logFileOp);
}
}
/**
* Gets and validates 'AndroidLaunchMode' preference from config.xml. Returns
* preference value and warns if it doesn't seem to be valid
*
* @param {ConfigParser} platformConfig A configParser instance for
* platform.
*
* @return {String} Preference's value from config.xml or
* default value, if there is no such preference. The default value is
* 'singleTop'
*/
function findAndroidLaunchModePreference (platformConfig) {
var launchMode = platformConfig.getPreference('AndroidLaunchMode');
if (!launchMode) {
// Return a default value
return 'singleTop';
}
var expectedValues = ['standard', 'singleTop', 'singleTask', 'singleInstance'];
var valid = expectedValues.indexOf(launchMode) >= 0;
if (!valid) {
// Note: warn, but leave the launch mode as developer wanted, in case the list of options changes in the future
events.emit('warn', 'Unrecognized value for AndroidLaunchMode preference: ' +
launchMode + '. Expected values are: ' + expectedValues.join(', '));
}
return launchMode;
}
| fix(prepare): fix pattern used to collect image resources (#1084)
The pattern contained an additional plus that slipped in during the
refactoring done in #842. See [the diff][1] for details.
[1]: https://github.com/apache/cordova-android/pull/842/commits/09e8248d1f0bbf5c833765e71dbf2343c38cc6bf#diff-26c51bfaa44eff1e46fd61ec3225ec13L640-R650 | bin/templates/cordova/lib/prepare.js | fix(prepare): fix pattern used to collect image resources (#1084) | <ide><path>in/templates/cordova/lib/prepare.js
<ide> */
<ide> function mapImageResources (rootDir, subDir, type, resourceName) {
<ide> const pathMap = {};
<del> const pattern = new RegExp(type + '+-.+');
<add> const pattern = new RegExp(type + '-.+');
<ide> utils.scanDirectory(path.join(rootDir, subDir), pattern).forEach(function (drawableFolder) {
<ide> const imagePath = path.join(subDir, path.basename(drawableFolder), resourceName);
<ide> pathMap[imagePath] = null; |
|
Java | apache-2.0 | 1f1f4fc0d4e1fb1b0431d2eac058fb9b408c39e8 | 0 | CrocusJava/battleofrotterdam,CrocusJava/battleofrotterdam | /**
*
*/
package com.battleejb.ejbbeans;
import java.util.Date;
import java.util.List;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceException;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import com.battleejb.entities.Competition;
import com.battleejb.entities.CompetitionType;
import com.battleejb.entities.CompetitionType_;
import com.battleejb.entities.Competition_;
import com.battleejb.entities.Project;
import com.battleejb.entities.Project_;
import com.battleejb.entities.User_;
/**
* @author marina
*
*/
@Stateless
@LocalBean
public class CompetitionBean extends AbstractFacade<Competition> {
private static final String SORT_TYPE_ASC = "asc";
private static final String SORT_TYPE_DESC = "desc";
private static final String ORDER_BY_START_DATE = "startdate";
private static final String ORDER_BY_END_DATE = "enddate";
private static final String ORDER_BY_REG_DEADLINE = "regdeadline";
@PersistenceContext(unitName = "persistence")
EntityManager em;
public CompetitionBean() {
super(Competition.class);
}
@Override
protected EntityManager getEntityManager() {
return em;
}
public Competition getCurrentCompetitionByType(CompetitionType type,
Date currentDate) {
Competition competition = null;
try {
competition = em
.createNamedQuery(
"Competition.findCurrentCompetitionByType",
Competition.class).setParameter("type", type)
.setParameter("currentDate", currentDate).getSingleResult();
} catch (PersistenceException e) {
e.printStackTrace();
}
return competition;
}
public Competition getCurrentCompetitionByType(String type) {
Competition competition = null;
try {
competition = em
.createNamedQuery(
"Competition.findCurrentCompetitionByTypeName",
Competition.class).setParameter("type", type)
.getSingleResult();
} catch (PersistenceException e) {
e.printStackTrace();
}
return competition;
}
public List<Competition> findFilterOrderByDateLimit(String orderBy,
String sort, Date startDateFrom, Date startDateTo,
Date endDateFrom, Date endDateTo, Date regDeadlineFrom,
Date regDeadlineTo, Integer id, Integer winnerId, String type,
int firstPosition, int size) {
List<Competition> competitions = null;
try {
CriteriaBuilder cb = em.getCriteriaBuilder();
CriteriaQuery<Competition> cq = cb.createQuery(Competition.class);
Root<Competition> p = cq.from(Competition.class);
Predicate predicate = null;
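// Chain an AND restriction onto the predicate for each filter argument that was supplied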
if (startDateFrom != null) {
predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
p.get(Competition_.dateStart), startDateFrom));
}
if (startDateTo != null) {
predicate = cb.and(predicate, cb.lessThanOrEqualTo(
p.get(Competition_.dateStart), startDateTo));
}
if (endDateFrom != null) {
predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
p.get(Competition_.dateEnd), endDateFrom));
}
if (endDateTo != null) {
predicate = cb.and(predicate, cb.lessThanOrEqualTo(
p.get(Competition_.dateEnd), endDateTo));
}
if (regDeadlineFrom != null) {
predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
p.get(Competition_.registerDeadline), regDeadlineFrom));
}
if (regDeadlineTo != null) {
predicate = cb.and(predicate, cb.lessThanOrEqualTo(
p.get(Competition_.registerDeadline), regDeadlineTo));
}
if (id != null) {
predicate = cb.and(predicate,
cb.equal(p.get(Competition_.id), id));
}
if (type != null) {
predicate = cb.and(
predicate,
cb.equal(
p.get(Competition_.type).get(
CompetitionType_.name), type));
}
if (winnerId != null) {
predicate = cb.and(predicate, cb.equal(p.get(Competition_.user)
.get(User_.id), winnerId));
}
if (predicate != null) {
cq.where(predicate);
}
Expression<?> ex = null;
if (orderBy.equals(ORDER_BY_START_DATE)) {
ex = p.get(Competition_.dateStart);
} else if (orderBy.equals(ORDER_BY_END_DATE)) {
ex = p.get(Competition_.dateEnd);
} else if (orderBy.equals(ORDER_BY_REG_DEADLINE)) {
ex = p.get(Competition_.registerDeadline);
}
Order order = null;
if (sort.equals(SORT_TYPE_ASC)) {
order = cb.asc(ex);
} else {
order = cb.desc(ex);
}
cq.orderBy(order);
competitions = em.createQuery(cq).setFirstResult(firstPosition)
.setMaxResults(size).getResultList();
} catch (PersistenceException e) {
e.printStackTrace();
}
return competitions;
}
}
| back/battleEJB/ejbModule/com/battleejb/ejbbeans/CompetitionBean.java | /**
*
*/
package com.battleejb.ejbbeans;
import java.util.Date;
import javax.ejb.LocalBean;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceException;
import com.battleejb.entities.Competition;
import com.battleejb.entities.CompetitionType;
/**
* @author marina
*
*/
@Stateless
@LocalBean
public class CompetitionBean extends AbstractFacade<Competition> {
@PersistenceContext(unitName = "persistence")
EntityManager em;
public CompetitionBean() {
super(Competition.class);
}
@Override
protected EntityManager getEntityManager() {
return em;
}
public Competition getCurrentCompetitionByType(CompetitionType type, Date currentDate){
Competition competition = null;
try {
competition = em.createNamedQuery("Competition.findCurrentCompetitionByType", Competition.class)
.setParameter("type", type)
.setParameter("currentDate", currentDate)
.getSingleResult();
} catch (PersistenceException e) {
e.printStackTrace();
}
return competition;
}
public Competition getCurrentCompetitionByType(String type){
Competition competition = null;
try {
competition = em.createNamedQuery("Competition.findCurrentCompetitionByTypeName", Competition.class)
.setParameter("type", type)
.getSingleResult();
} catch (PersistenceException e) {
e.printStackTrace();
}
return competition;
}
}
| Added required method for competitions command to CompetitionBean.java
| back/battleEJB/ejbModule/com/battleejb/ejbbeans/CompetitionBean.java | Added required method for competitions command to CompetitionBean.java | <ide><path>ack/battleEJB/ejbModule/com/battleejb/ejbbeans/CompetitionBean.java
<ide> package com.battleejb.ejbbeans;
<ide>
<ide> import java.util.Date;
<add>import java.util.List;
<ide>
<ide> import javax.ejb.LocalBean;
<ide> import javax.ejb.Stateless;
<ide> import javax.persistence.EntityManager;
<ide> import javax.persistence.PersistenceContext;
<ide> import javax.persistence.PersistenceException;
<add>import javax.persistence.criteria.CriteriaBuilder;
<add>import javax.persistence.criteria.CriteriaQuery;
<add>import javax.persistence.criteria.Expression;
<add>import javax.persistence.criteria.Order;
<add>import javax.persistence.criteria.Predicate;
<add>import javax.persistence.criteria.Root;
<ide>
<ide> import com.battleejb.entities.Competition;
<ide> import com.battleejb.entities.CompetitionType;
<add>import com.battleejb.entities.CompetitionType_;
<add>import com.battleejb.entities.Competition_;
<add>import com.battleejb.entities.Project;
<add>import com.battleejb.entities.Project_;
<add>import com.battleejb.entities.User_;
<ide>
<ide> /**
<ide> * @author marina
<del> *
<add> *
<ide> */
<ide>
<ide> @Stateless
<ide> @LocalBean
<ide> public class CompetitionBean extends AbstractFacade<Competition> {
<ide>
<add> private static final String SORT_TYPE_ASC = "asc";
<add> private static final String SORT_TYPE_DESC = "desc";
<add> private static final String ORDER_BY_START_DATE = "startdate";
<add> private static final String ORDER_BY_END_DATE = "enddate";
<add> private static final String ORDER_BY_REG_DEADLINE = "regdeadline";
<add>
<ide> @PersistenceContext(unitName = "persistence")
<ide> EntityManager em;
<del>
<add>
<ide> public CompetitionBean() {
<ide> super(Competition.class);
<ide> }
<del>
<add>
<ide> @Override
<ide> protected EntityManager getEntityManager() {
<ide> return em;
<ide> }
<del>
<del> public Competition getCurrentCompetitionByType(CompetitionType type, Date currentDate){
<add>
<add> public Competition getCurrentCompetitionByType(CompetitionType type,
<add> Date currentDate) {
<ide> Competition competition = null;
<ide> try {
<del> competition = em.createNamedQuery("Competition.findCurrentCompetitionByType", Competition.class)
<del> .setParameter("type", type)
<del> .setParameter("currentDate", currentDate)
<add> competition = em
<add> .createNamedQuery(
<add> "Competition.findCurrentCompetitionByType",
<add> Competition.class).setParameter("type", type)
<add> .setParameter("currentDate", currentDate).getSingleResult();
<add> } catch (PersistenceException e) {
<add> e.printStackTrace();
<add> }
<add> return competition;
<add> }
<add>
<add> public Competition getCurrentCompetitionByType(String type) {
<add> Competition competition = null;
<add> try {
<add> competition = em
<add> .createNamedQuery(
<add> "Competition.findCurrentCompetitionByTypeName",
<add> Competition.class).setParameter("type", type)
<ide> .getSingleResult();
<ide> } catch (PersistenceException e) {
<ide> e.printStackTrace();
<ide> }
<ide> return competition;
<ide> }
<del>
<del> public Competition getCurrentCompetitionByType(String type){
<del> Competition competition = null;
<add>
<add> public List<Competition> findFilterOrderByDateLimit(String orderBy,
<add> String sort, Date startDateFrom, Date startDateTo,
<add> Date endDateFrom, Date endDateTo, Date regDeadlineFrom,
<add> Date regDeadlineTo, Integer id, Integer winnerId, String type,
<add> int firstPosition, int size) {
<add> List<Competition> competitions = null;
<ide> try {
<del> competition = em.createNamedQuery("Competition.findCurrentCompetitionByTypeName", Competition.class)
<del> .setParameter("type", type)
<del> .getSingleResult();
<add> CriteriaBuilder cb = em.getCriteriaBuilder();
<add> CriteriaQuery<Competition> cq = cb.createQuery(Competition.class);
<add> Root<Competition> p = cq.from(Competition.class);
<add>
<add> Predicate predicate = null;
<add>
<add> if (startDateFrom != null) {
<add> predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
<add> p.get(Competition_.dateStart), startDateFrom));
<add> }
<add> if (startDateTo != null) {
<add> predicate = cb.and(predicate, cb.lessThanOrEqualTo(
<add> p.get(Competition_.dateStart), startDateTo));
<add> }
<add> if (endDateFrom != null) {
<add> predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
<add> p.get(Competition_.dateEnd), endDateFrom));
<add> }
<add> if (endDateTo != null) {
<add> predicate = cb.and(predicate, cb.lessThanOrEqualTo(
<add> p.get(Competition_.dateEnd), endDateTo));
<add> }
<add> if (regDeadlineFrom != null) {
<add> predicate = cb.and(predicate, cb.greaterThanOrEqualTo(
<add> p.get(Competition_.registerDeadline), regDeadlineFrom));
<add> }
<add> if (regDeadlineTo != null) {
<add> predicate = cb.and(predicate, cb.lessThanOrEqualTo(
<add> p.get(Competition_.registerDeadline), regDeadlineTo));
<add> }
<add> if (id != null) {
<add> predicate = cb.and(predicate,
<add> cb.equal(p.get(Competition_.id), id));
<add> }
<add> if (type != null) {
<add> predicate = cb.and(
<add> predicate,
<add> cb.equal(
<add> p.get(Competition_.type).get(
<add> CompetitionType_.name), type));
<add> }
<add> if (winnerId != null) {
<add> predicate = cb.and(predicate, cb.equal(p.get(Competition_.user)
<add> .get(User_.id), winnerId));
<add> }
<add> if (predicate != null) {
<add> cq.where(predicate);
<add> }
<add>
<add> Expression<?> ex = null;
<add> if (orderBy.equals(ORDER_BY_START_DATE)) {
<add> ex = p.get(Competition_.dateStart);
<add> } else if (orderBy.equals(ORDER_BY_END_DATE)) {
<add> ex = p.get(Competition_.dateEnd);
<add> } else if (orderBy.equals(ORDER_BY_REG_DEADLINE)) {
<add> ex = p.get(Competition_.registerDeadline);
<add> }
<add>
<add> Order order = null;
<add> if (sort.equals(SORT_TYPE_ASC)) {
<add> order = cb.asc(ex);
<add> } else {
<add> order = cb.desc(ex);
<add> }
<add> cq.orderBy(order);
<add> competitions = em.createQuery(cq).setFirstResult(firstPosition)
<add> .setMaxResults(size).getResultList();
<add>
<ide> } catch (PersistenceException e) {
<ide> e.printStackTrace();
<ide> }
<del> return competition;
<add> return competitions;
<ide> }
<ide> } |
|
Java | epl-1.0 | 9d1cfbd36538ea664ae00b26d5ac91ba48ae85c2 | 0 | ESSICS/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio | package org.csstudio.utility.pv;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.Platform;
/** PV Factory
* <p>
* Locates the one and only expected implementation of the IPVFactory
* via an extension to the pvfactory extension point
* and creates the PV through it.
*
* <pre>
// Create PV
final PV pv = PVFactory.createPV(pv_name);
// Register listener for updates
pv.addListener(new PVListener()
{
public void pvDisconnected(PV pv)
{
System.out.println(pv.getName() + " is disconnected");
}
public void pvValueUpdate(PV pv)
{
IValue value = pv.getValue();
System.out.println(pv.getName() + " = " + value);
if (value instanceof IDoubleValue)
{
IDoubleValue dbl = (IDoubleValue) value;
System.out.println(dbl.getValue());
}
// ... or use ValueUtil
}
});
// Start the PV
pv.start();
...
pv.stop();
</pre>
*
* @author Kay Kasemir
*/
@SuppressWarnings("nls")
public class PVFactory
{
/** Separator between PV type indicator and rest of PV name.
* <p>
* This one is URL-ish, and works OK with EPICS PVs because
* those are unlikely to contain "://" themselves, while
* just ":" for example is likely to be inside the PV name
*/
final public static String SEPARATOR = "://";
/** ID of the extension point */
final private static String PVFACTORY_EXT_ID =
"org.csstudio.utility.pv.pvfactory";
/** Lazily initialized PV factories found in extension registry */
private static Map<String, IPVFactory> pv_factory = null;
/** Default PV type, initialized from preferences */
private static String default_type;
/** Initialize from preferences and extension point registry */
final private static void initialize() throws Exception
{
// Get default type from preferences
default_type = Preferences.getDefaultType();
// Get extension point info from registry
pv_factory = new HashMap<String, IPVFactory>();
final IConfigurationElement[] configs = Platform.getExtensionRegistry()
.getConfigurationElementsFor(PVFACTORY_EXT_ID);
// Allow one and only implementation
if (configs.length < 1)
throw new Exception("No extensions to " + PVFACTORY_EXT_ID + " found");
for (IConfigurationElement config : configs)
{
final String plugin = config.getContributor().getName();
final String name = config.getAttribute("name");
final String prefix = config.getAttribute("prefix");
final IPVFactory factory = (IPVFactory) config.createExecutableExtension("class");
Plugin.getLogger().debug(plugin + " provides '" + name +
"', prefix '" + prefix + "'");
pv_factory.put(prefix, factory);
}
}
/** @return Supported PV type prefixes */
final public static String[] getSupportedPrefixes() throws Exception
{
if (pv_factory == null)
initialize();
final ArrayList<String> prefixes = new ArrayList<String>();
final Iterator<String> iterator = pv_factory.keySet().iterator();
while (iterator.hasNext())
prefixes.add(iterator.next());
return (String[]) prefixes.toArray(new String[prefixes.size()]);
}
/** Create a PV for the given channel name, using the PV factory
* selected via the prefix of the channel name, or the default
* PV factory if no prefix is included in the channel name.
*
* @param name Channel name, format "prefix://name" or just "name"
* @return PV
* @exception Exception on error
*/
final public static PV createPV(final String name) throws Exception
{
if (pv_factory == null)
initialize();
// Identify type of PV
// PV name = "type:...."
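// e.g. "xyz://some:pv" splits into type "xyz" and base "some:pv"; names without "://" use the default type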
final String type, base;
final int sep = name.indexOf(SEPARATOR);
if (sep > 0)
{
type = name.substring(0, sep);
base = name.substring(sep+SEPARATOR.length());
}
else
{
type = default_type;
base = name;
}
final IPVFactory factory = pv_factory.get(type);
if (factory == null)
throw new Exception("Unknown PV type in PV " + name);
return factory.createPV(base);
}
}
| applications/plugins/org.csstudio.utility.pv/src/org/csstudio/utility/pv/PVFactory.java | package org.csstudio.utility.pv;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.eclipse.core.runtime.IConfigurationElement;
import org.eclipse.core.runtime.Platform;
/** PV Factory
* <p>
* Locates the one and only expected implementation of the IPVFactory
* via an extension to the pvfactory extension point
* and creates the PV through it.
*
* <pre>
// Create PV
final PV pv = PVFactory.createPV(pv_name);
// Register listener for updates
pv.addListener(new PVListener()
{
public void pvDisconnected(PV pv)
{
System.out.println(pv.getName() + " is disconnected");
}
public void pvValueUpdate(PV pv)
{
IValue value = pv.getValue();
System.out.println(pv.getName() + " = " + value);
if (value instanceof IDoubleValue)
{
IDoubleValue dbl = (IDoubleValue) value;
System.out.println(dbl.getValue());
}
// ... or use ValueUtil
}
});
// Start the PV
pv.start();
...
pv.stop();
</pre>
*
* @author Kay Kasemir
*/
@SuppressWarnings("nls")
public class PVFactory
{
/** Separator between PV type indicator and rest of PV name.
* <p>
* This one is URL-ish, and works OK with EPICS PVs because
* those are unlikely to contain "://" themselves, while
* just ":" for example is likely to be inside the PV name
*/
final public static String SEPARATOR = "://";
/** ID of the extension point */
final private static String PVFACTORY_EXT_ID =
"org.csstudio.utility.pv.pvfactory";
/** Lazyly intialized PV factories found in extension registry */
private static Map<String, IPVFactory> pv_factory = null;
/** Default PV type, initiliazed from preferences */
private static String default_type;
/** Initialize from preferences and extension point registry */
final private static void initialize() throws Exception
{
// Get default type from preferences
default_type = Preferences.getDefaultType();
// Get extension point info from registry
pv_factory = new HashMap<String, IPVFactory>();
final IConfigurationElement[] configs = Platform.getExtensionRegistry()
.getConfigurationElementsFor(PVFACTORY_EXT_ID);
// Allow one and only implementation
if (configs.length < 1)
throw new Exception("No extensions to " + PVFACTORY_EXT_ID + " found");
for (IConfigurationElement config : configs)
{
final String plugin = config.getContributor().getName();
final String name = config.getAttribute("name");
final String prefix = config.getAttribute("prefix");
final IPVFactory factory = (IPVFactory) config.createExecutableExtension("class");
Plugin.getLogger().debug(plugin + " provides '" + name +
"', prefix '" + prefix);
pv_factory.put(prefix, factory);
}
}
/** @return Supported PV type prefixes */
final public static String[] getSupportedPrefixes() throws Exception
{
if (pv_factory == null)
initialize();
final ArrayList<String> prefixes = new ArrayList<String>();
final Iterator<String> iterator = pv_factory.keySet().iterator();
while (iterator.hasNext())
prefixes.add(iterator.next());
return (String[]) prefixes.toArray(new String[prefixes.size()]);
}
/** Create a PV for the given channel name, using the PV factory
* selected via the prefix of the channel name, or the default
* PV factory if no prefix is included in the channel name.
*
* @param name Channel name, format "prefix://name" or just "name"
* @return PV
* @exception Exception on error
*/
final public static PV createPV(final String name) throws Exception
{
if (pv_factory == null)
initialize();
// Identify type of PV
// PV name = "type:...."
final String type, base;
final int sep = name.indexOf(SEPARATOR);
if (sep > 0)
{
type = name.substring(0, sep);
base = name.substring(sep+SEPARATOR.length());
}
else
{
type = default_type;
base = name;
}
final IPVFactory factory = pv_factory.get(type);
if (factory == null)
throw new Exception("Unknown PV type in PV " + name);
return factory.createPV(base);
}
}
| *** empty log message ***
| applications/plugins/org.csstudio.utility.pv/src/org/csstudio/utility/pv/PVFactory.java | *** empty log message *** | <ide><path>pplications/plugins/org.csstudio.utility.pv/src/org/csstudio/utility/pv/PVFactory.java
<ide> final String prefix = config.getAttribute("prefix");
<ide> final IPVFactory factory = (IPVFactory) config.createExecutableExtension("class");
<ide> Plugin.getLogger().debug(plugin + " provides '" + name +
<del> "', prefix '" + prefix);
<add> "', prefix '" + prefix + "'");
<ide> pv_factory.put(prefix, factory);
<ide> }
<ide> } |
|
Java | mit | 37b2410be0e4a24989d4271943b1520a0cbd9347 | 0 | DaedalusGame/Aura-Cascade,Mazdallier/Aura-Cascade,pixlepix/Aura-Cascade,pixlepix/Aura-Cascade,Mazdallier/Aura-Cascade,DaedalusGame/Aura-Cascade | package pixlepix.auracascade.item;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ChatComponentText;
import net.minecraft.world.World;
import pixlepix.auracascade.main.EnumColor;
import pixlepix.auracascade.registry.CraftingBenchRecipe;
import pixlepix.auracascade.registry.ITTinkererItem;
import pixlepix.auracascade.registry.ThaumicTinkererRecipe;
import java.util.ArrayList;
import java.util.List;
/**
* Created by localmacaccount on 5/16/15.
*/
public class ItemPrismaticWand extends Item implements ITTinkererItem {
public static String[] modes = new String[]{EnumColor.AQUA + "Selection", EnumColor.YELLOW + "Copy", EnumColor.ORANGE + "Paste"};
public ItemPrismaticWand() {
super();
setMaxStackSize(1);
}
@Override
public boolean onItemUseFirst(ItemStack stack, EntityPlayer player, World world, int x, int y, int z, int p_77648_7_, float p_77648_8_, float p_77648_9_, float p_77648_10_) {
//More specific selections
if (!player.isSneaking() && !world.isRemote) {
if (stack.stackTagCompound == null) {
stack.stackTagCompound = new NBTTagCompound();
}
NBTTagCompound nbt = stack.stackTagCompound;
if (stack.getItemDamage() == 0) {
if (nbt.hasKey("x1")) {
nbt.setInteger("x2", nbt.getInteger("x1"));
nbt.setInteger("y2", nbt.getInteger("y1"));
nbt.setInteger("z2", nbt.getInteger("z1"));
}
nbt.setInteger("x1", x);
nbt.setInteger("y1", y);
nbt.setInteger("z1", z);
player.addChatComponentMessage(new ChatComponentText("Position set"));
return true;
}
}
return false;
}
@Override
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean p_77624_4_) {
super.addInformation(stack, player, list, p_77624_4_);
list.add(modes[stack.getItemDamage()]);
}
@Override
public ItemStack onItemRightClick(ItemStack stack, World world, EntityPlayer player) {
int mode = stack.getItemDamage();
if (player.isSneaking()) {
NBTTagCompound nbt = stack.stackTagCompound;
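//Sneak-right-clicking cycles to the next wand mode (Selection -> Copy -> Paste)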
mode++;
mode = mode % modes.length;
stack.setItemDamage(mode);
stack.stackTagCompound = nbt;
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Switched to: " + modes[mode]));
}
} else {
if (stack.stackTagCompound == null) {
stack.stackTagCompound = new NBTTagCompound();
}
NBTTagCompound nbt = stack.stackTagCompound;
switch (mode) {
/*
case 0:
//Make sure onItemUseFirst hasn't already grabbed it
if (nbt.hasKey("x1")) {
nbt.setInteger("x2", nbt.getInteger("x1"));
nbt.setInteger("y2", nbt.getInteger("y1"));
nbt.setInteger("z2", nbt.getInteger("z1"));
}
nbt.setInteger("x1", (int) player.posX);
nbt.setInteger("y1", (int) player.posY);
nbt.setInteger("z1", (int) player.posZ);
player.addChatComponentMessage(new ChatComponentText("Position set"));
break;
*/
case 1:
if (nbt.hasKey("x1") && nbt.hasKey("x2")) {
nbt.setInteger("cx1", nbt.getInteger("x1"));
nbt.setInteger("cy1", nbt.getInteger("y1"));
nbt.setInteger("cz1", nbt.getInteger("z1"));
nbt.setInteger("cx2", nbt.getInteger("x2"));
nbt.setInteger("cy2", nbt.getInteger("y2"));
nbt.setInteger("cz2", nbt.getInteger("z2"));
//This is how far away the player is from the copy/paste
nbt.setInteger("cxo", (int) Math.floor(nbt.getInteger("x1") - player.posX) + 1);
nbt.setInteger("cyo", (int) Math.floor(nbt.getInteger("y1") - player.posY));
nbt.setInteger("czo", (int) Math.floor(nbt.getInteger("z1") - player.posZ) + 1);
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Copied to clipboard"));
}
} else {
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Invalid selection"));
}
}
break;
case 2:
int x = (int) player.posX;
int y = (int) player.posY;
int z = (int) player.posZ;
if (nbt.hasKey("cx1")) {
int cx1 = nbt.getInteger("cx1");
int cy1 = nbt.getInteger("cy1");
int cz1 = nbt.getInteger("cz1");
int cx2 = nbt.getInteger("cx2");
int cy2 = nbt.getInteger("cy2");
int cz2 = nbt.getInteger("cz2");
int xo = nbt.getInteger("cxo");
int yo = nbt.getInteger("cyo");
int zo = nbt.getInteger("czo");
                        //For simplicity's sake, normalize so that c*1 is lower than c*2
if (cx1 > cx2) {
                            //Yes, yes, a bitwise XOR swap would avoid the temp variable,
                            //but that's just a party trick
                            //I mean, if you go to some nerdy parties
int t = cx1;
cx1 = cx2;
cx2 = t;
}
if (cy1 > cy2) {
int t = cy1;
cy1 = cy2;
cy2 = t;
}
if (cz1 > cz2) {
int t = cz1;
cz1 = cz2;
cz2 = t;
}
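                        //Walk the copied region (bounds are inclusive) and rebuild it relative to the
                        //player's position plus the stored offsets (cxo/cyo/czo). Only air blocks at the
                        //target position are filled; outside creative mode the matching item has to be
                        //taken from the player's inventory.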
int xi = cx1;
do {
int yi = cy1;
do {
int zi = cz1;
do {
int dx = xi - cx1;
int dy = yi - cy1;
int dz = zi - cz1;
if (world.isAirBlock(x + dx + xo, y + dy + yo, z + dz + zo) && !world.isRemote) {
Block block = world.getBlock(cx1 + dx, cy1 + dy, cz1 + dz);
Item item = block.getItem(world, cx1 + dx, cy1 + dy, cz1 + dz);
int worldDmg = world.getBlockMetadata(cx1 + dx, cy1 + dy, cz1 + dz);
int dmg = block.getDamageValue(world, cx1 + dx, cy1 + dy, cz1 + dz);
boolean usesMetadataForPlacing = false;
ArrayList<ItemStack> drops = block.getDrops(world, cx1 + dx, cy1 + dy, cz1 + dz, dmg, 0);
if (drops.size() == 1) {
ItemStack dropStack = drops.get(0);
usesMetadataForPlacing = dropStack.getItem() == item && dropStack.getItemDamage() == 0 && worldDmg != 0;
}
if (player.capabilities.isCreativeMode) {
world.setBlock(x + dx + xo, y + dy + yo, z + dz + zo, block, worldDmg, 3);
} else if (player.inventory.hasItemStack(new ItemStack(item, 1, dmg))) {
int slot = slotOfItemStack(new ItemStack(item, 1, dmg), player.inventory);
if (item instanceof ItemBlock) {
if (!world.isRemote) {
((ItemBlock) item).placeBlockAt(player.inventory.getStackInSlot(slot), player, world, x + dx + xo, y + dy + yo, z + dz + zo, 0, 0, 0, 0, dmg);
if (usesMetadataForPlacing) {
world.setBlockMetadataWithNotify(x + dx + xo, y + dy + yo, z + dz + zo, worldDmg, 3);
}
}
player.inventory.decrStackSize(slot, 1);
}
}
}
zi++;
} while (zi <= cz2);
yi++;
} while (yi <= cy2);
xi++;
} while (xi <= cx2);
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Successfully pasted building"));
}
} else {
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Nothing copied"));
}
}
break;
}
}
return stack;
}
//Adapted from InventoryPlayer.hasItemStack
public int slotOfItemStack(ItemStack stack, InventoryPlayer inv) {
int i;
for (i = 0; i < inv.mainInventory.length; ++i) {
if (inv.mainInventory[i] != null && inv.mainInventory[i].isItemEqual(stack)) {
return i;
}
}
return -1;
}
@Override
public ArrayList<Object> getSpecialParameters() {
return null;
}
@Override
public String getItemName() {
return "prismaticWand";
}
@Override
public boolean shouldRegister() {
return true;
}
@Override
public boolean shouldDisplayInTab() {
return true;
}
@Override
public ThaumicTinkererRecipe getRecipeItem() {
return new CraftingBenchRecipe(new ItemStack(this), " P ", " I ", " I ", 'P', ItemMaterial.getPrism(), 'I', new ItemStack(Items.blaze_rod));
}
@Override
public void registerIcons(IIconRegister register) {
itemIcon = register.registerIcon("aura:prismaticWand");
}
@Override
public int getCreativeTabPriority() {
return -25;
}
}
| src/main/java/pixlepix/auracascade/item/ItemPrismaticWand.java | package pixlepix.auracascade.item;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ChatComponentText;
import net.minecraft.world.World;
import pixlepix.auracascade.registry.CraftingBenchRecipe;
import pixlepix.auracascade.registry.ITTinkererItem;
import pixlepix.auracascade.registry.ThaumicTinkererRecipe;
import java.util.ArrayList;
import java.util.List;
/**
* Created by localmacaccount on 5/16/15.
*/
public class ItemPrismaticWand extends Item implements ITTinkererItem {
public static String[] modes = new String[]{"Selection", "Copy", "Paste"};
public ItemPrismaticWand() {
super();
setMaxStackSize(1);
}
@Override
public boolean onItemUseFirst(ItemStack stack, EntityPlayer player, World world, int x, int y, int z, int p_77648_7_, float p_77648_8_, float p_77648_9_, float p_77648_10_) {
//More specific selections
if (!player.isSneaking() && !world.isRemote) {
if (stack.stackTagCompound == null) {
stack.stackTagCompound = new NBTTagCompound();
}
NBTTagCompound nbt = stack.stackTagCompound;
if (stack.getItemDamage() == 0) {
if (nbt.hasKey("x1")) {
nbt.setInteger("x2", nbt.getInteger("x1"));
nbt.setInteger("y2", nbt.getInteger("y1"));
nbt.setInteger("z2", nbt.getInteger("z1"));
}
nbt.setInteger("x1", x);
nbt.setInteger("y1", y);
nbt.setInteger("z1", z);
player.addChatComponentMessage(new ChatComponentText("Position set"));
return true;
}
}
return false;
}
@Override
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean p_77624_4_) {
super.addInformation(stack, player, list, p_77624_4_);
list.add(modes[stack.getItemDamage()]);
}
@Override
public ItemStack onItemRightClick(ItemStack stack, World world, EntityPlayer player) {
int mode = stack.getItemDamage();
if (player.isSneaking()) {
NBTTagCompound nbt = stack.stackTagCompound;
mode++;
mode = mode % modes.length;
stack.setItemDamage(mode);
stack.stackTagCompound = nbt;
} else {
if (stack.stackTagCompound == null) {
stack.stackTagCompound = new NBTTagCompound();
}
NBTTagCompound nbt = stack.stackTagCompound;
switch (mode) {
/*
case 0:
//Make sure onItemUseFirst hasn't already grabbed it
if (nbt.hasKey("x1")) {
nbt.setInteger("x2", nbt.getInteger("x1"));
nbt.setInteger("y2", nbt.getInteger("y1"));
nbt.setInteger("z2", nbt.getInteger("z1"));
}
nbt.setInteger("x1", (int) player.posX);
nbt.setInteger("y1", (int) player.posY);
nbt.setInteger("z1", (int) player.posZ);
player.addChatComponentMessage(new ChatComponentText("Position set"));
break;
*/
case 1:
if (nbt.hasKey("x1") && nbt.hasKey("x2")) {
nbt.setInteger("cx1", nbt.getInteger("x1"));
nbt.setInteger("cy1", nbt.getInteger("y1"));
nbt.setInteger("cz1", nbt.getInteger("z1"));
nbt.setInteger("cx2", nbt.getInteger("x2"));
nbt.setInteger("cy2", nbt.getInteger("y2"));
nbt.setInteger("cz2", nbt.getInteger("z2"));
//This is how far away the player is from the copy/paste
nbt.setInteger("cxo", (int) Math.floor(nbt.getInteger("x1") - player.posX) + 1);
nbt.setInteger("cyo", (int) Math.floor(nbt.getInteger("y1") - player.posY));
nbt.setInteger("czo", (int) Math.floor(nbt.getInteger("z1") - player.posZ) + 1);
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Copied to clipboard"));
}
} else {
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Invalid selection"));
}
}
break;
case 2:
int x = (int) player.posX;
int y = (int) player.posY;
int z = (int) player.posZ;
if (nbt.hasKey("cx1")) {
int cx1 = nbt.getInteger("cx1");
int cy1 = nbt.getInteger("cy1");
int cz1 = nbt.getInteger("cz1");
int cx2 = nbt.getInteger("cx2");
int cy2 = nbt.getInteger("cy2");
int cz2 = nbt.getInteger("cz2");
int xo = nbt.getInteger("cxo");
int yo = nbt.getInteger("cyo");
int zo = nbt.getInteger("czo");
                        //For simplicity's sake, normalize so that c*1 is lower than c*2
if (cx1 > cx2) {
                            //Yes, yes, a bitwise XOR swap would avoid the temp variable,
                            //but that's just a party trick
                            //I mean, if you go to some nerdy parties
int t = cx1;
cx1 = cx2;
cx2 = t;
}
if (cy1 > cy2) {
int t = cy1;
cy1 = cy2;
cy2 = t;
}
if (cz1 > cz2) {
int t = cz1;
cz1 = cz2;
cz2 = t;
}
int xi = cx1;
do {
int yi = cy1;
do {
int zi = cz1;
do {
int dx = xi - cx1;
int dy = yi - cy1;
int dz = zi - cz1;
if (world.isAirBlock(x + dx + xo, y + dy + yo, z + dz + zo) && !world.isRemote) {
Block block = world.getBlock(cx1 + dx, cy1 + dy, cz1 + dz);
Item item = block.getItem(world, cx1 + dx, cy1 + dy, cz1 + dz);
int worldDmg = world.getBlockMetadata(cx1 + dx, cy1 + dy, cz1 + dz);
int dmg = block.getDamageValue(world, cx1 + dx, cy1 + dy, cz1 + dz);
boolean usesMetadataForPlacing = false;
ArrayList<ItemStack> drops = block.getDrops(world, cx1 + dx, cy1 + dy, cz1 + dz, dmg, 0);
if (drops.size() == 1) {
ItemStack dropStack = drops.get(0);
usesMetadataForPlacing = dropStack.getItem() == item && dropStack.getItemDamage() == 0 && worldDmg != 0;
}
if (player.capabilities.isCreativeMode) {
world.setBlock(x + dx + xo, y + dy + yo, z + dz + zo, block, worldDmg, 3);
} else if (player.inventory.hasItemStack(new ItemStack(item, 1, dmg))) {
int slot = slotOfItemStack(new ItemStack(item, 1, dmg), player.inventory);
if (item instanceof ItemBlock) {
if (!world.isRemote) {
((ItemBlock) item).placeBlockAt(player.inventory.getStackInSlot(slot), player, world, x + dx + xo, y + dy + yo, z + dz + zo, 0, 0, 0, 0, dmg);
if (usesMetadataForPlacing) {
world.setBlockMetadataWithNotify(x + dx + xo, y + dy + yo, z + dz + zo, worldDmg, 3);
}
}
player.inventory.decrStackSize(slot, 1);
}
}
}
zi++;
} while (zi <= cz2);
yi++;
} while (yi <= cy2);
xi++;
} while (xi <= cx2);
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Successfully pasted building"));
}
} else {
if (!world.isRemote) {
player.addChatComponentMessage(new ChatComponentText("Nothing copied"));
}
}
break;
}
}
return stack;
}
//Adapted from InventoryPlayer.hasItemStack
public int slotOfItemStack(ItemStack stack, InventoryPlayer inv) {
int i;
for (i = 0; i < inv.mainInventory.length; ++i) {
if (inv.mainInventory[i] != null && inv.mainInventory[i].isItemEqual(stack)) {
return i;
}
}
return -1;
}
@Override
public ArrayList<Object> getSpecialParameters() {
return null;
}
@Override
public String getItemName() {
return "prismaticWand";
}
@Override
public boolean shouldRegister() {
return true;
}
@Override
public boolean shouldDisplayInTab() {
return true;
}
@Override
public ThaumicTinkererRecipe getRecipeItem() {
return new CraftingBenchRecipe(new ItemStack(this), " P ", " I ", " I ", 'P', ItemMaterial.getPrism(), 'I', new ItemStack(Items.blaze_rod));
}
@Override
public void registerIcons(IIconRegister register) {
itemIcon = register.registerIcon("aura:prismaticWand");
}
@Override
public int getCreativeTabPriority() {
return -25;
}
}
| Added chat to the prismatic wand
| src/main/java/pixlepix/auracascade/item/ItemPrismaticWand.java | Added chat to the prismatic wand | <ide><path>rc/main/java/pixlepix/auracascade/item/ItemPrismaticWand.java
<ide> import net.minecraft.nbt.NBTTagCompound;
<ide> import net.minecraft.util.ChatComponentText;
<ide> import net.minecraft.world.World;
<add>import pixlepix.auracascade.main.EnumColor;
<ide> import pixlepix.auracascade.registry.CraftingBenchRecipe;
<ide> import pixlepix.auracascade.registry.ITTinkererItem;
<ide> import pixlepix.auracascade.registry.ThaumicTinkererRecipe;
<ide> */
<ide> public class ItemPrismaticWand extends Item implements ITTinkererItem {
<ide>
<del> public static String[] modes = new String[]{"Selection", "Copy", "Paste"};
<add> public static String[] modes = new String[]{EnumColor.AQUA + "Selection", EnumColor.YELLOW + "Copy", EnumColor.ORANGE + "Paste"};
<add>
<ide>
<ide> public ItemPrismaticWand() {
<ide> super();
<ide> mode = mode % modes.length;
<ide> stack.setItemDamage(mode);
<ide> stack.stackTagCompound = nbt;
<add> if (!world.isRemote) {
<add> player.addChatComponentMessage(new ChatComponentText("Switched to: " + modes[mode]));
<add> }
<ide> } else {
<ide> if (stack.stackTagCompound == null) {
<ide> stack.stackTagCompound = new NBTTagCompound(); |
|
Java | agpl-3.0 | 3dea4354c6a8ff6d9d29e02b995d2e1ba1864857 | 0 | opengeogroep/safetymaps-server,opengeogroep/safetymaps-server,opengeogroep/safetymaps-server | package nl.opengeogroep.safetymaps.server.admin.stripes;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import javax.mail.Message.RecipientType;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import net.sourceforge.stripes.action.*;
import nl.opengeogroep.safetymaps.server.db.Cfg;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONObject;
/**
*
* @author Matthijs Laan
*/
@StrictBinding
@UrlBinding("/action/mail")
public class MailActionBean implements ActionBean {
private ActionBeanContext context;
private static final Log log = LogFactory.getLog("cfg");
@Override
public ActionBeanContext getContext() {
return context;
}
@Override
public void setContext(ActionBeanContext context) {
this.context = context;
}
public Resolution mail() throws IOException {
Session session;
JSONObject response = new JSONObject();
response.put("result", false);
try {
Context ctx = new InitialContext();
session = (Session)ctx.lookup("java:comp/env/mail/session");
} catch(Exception e) {
log.error("Mail session not configured correctly, exception looking up JNDI resource", e);
response.put("error", "Server not configured correctly to send mail");
return new StreamingResolution("application/json", response.toString());
}
String mail, to, from, subject;
try {
String template = Cfg.getSetting("support_mail_template");
if(template == null) {
template = FileUtils.readFileToString(new File(context.getServletContext().getRealPath("/WEB-INF/mail.txt")));
}
final Map<String,String[]> parameters = new HashMap(context.getRequest().getParameterMap());
String replace = Cfg.getSetting("support_mail_replace_search");
String replacement = Cfg.getSetting("support_mail_replacement");
if(replace != null && replacement != null && parameters.containsKey("permalink")) {
String permalink = parameters.get("permalink")[0];
permalink = Pattern.compile(replace).matcher(permalink).replaceAll(replacement);
parameters.put("permalink", new String[] { permalink });
}
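            // Fill the ${...} placeholders in the mail template with values from the request parameters;
            // missing parameters resolve to an empty string and only the first value of each parameter is used.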
StrSubstitutor request = new StrSubstitutor(new StrLookup<String>() {
@Override
public String lookup(String key) {
String[] value = parameters.get(key);
if(value == null || value.length == 0) {
return "";
} else {
return value[0];
}
}
});
mail = request.replace(template);
to = Cfg.getSetting("support_mail_to");
from = Cfg.getSetting("support_mail_from");
subject = Cfg.getSetting("support_mail_subject");
if(to == null || from == null || subject == null) {
log.error("Missing safetymaps.settings keys for either support_mail_to, support_mail_from or support_mail_subject");
response.put("error", "Server configuration error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
subject = request.replace(subject);
log.debug("Sending formatted mail to: " + to + ", subject: " + subject + ", body: " + mail);
log.info("Sending mail to " + to + ", received request from " + context.getRequest().getRemoteAddr());
} catch(Exception e) {
log.error("Error formatting mail", e);
response.put("error", "Server error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
try {
MimeMessage msg = new MimeMessage(session);
msg.setFrom(from);
msg.addRecipient(RecipientType.TO, new InternetAddress(to));
String sender = context.getRequest().getParameter("email");
if(sender != null) {
msg.addRecipient(RecipientType.CC, new InternetAddress(sender));
}
msg.setSubject(subject);
msg.setSentDate(new Date());
msg.setContent(mail, "text/plain");
Transport.send(msg);
} catch(Exception e) {
log.error("Error formatting mail", e);
response.put("error", "Server error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
response.put("result", true);
return new StreamingResolution("application/json", response.toString());
}
}
| src/main/java/nl/opengeogroep/safetymaps/server/admin/stripes/MailActionBean.java | package nl.opengeogroep.safetymaps.server.admin.stripes;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import javax.mail.Message.RecipientType;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import net.sourceforge.stripes.action.*;
import nl.opengeogroep.safetymaps.server.db.Cfg;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONObject;
/**
*
* @author Matthijs Laan
*/
@StrictBinding
@UrlBinding("/action/mail")
public class MailActionBean implements ActionBean {
private ActionBeanContext context;
private static final Log log = LogFactory.getLog("cfg");
@Override
public ActionBeanContext getContext() {
return context;
}
@Override
public void setContext(ActionBeanContext context) {
this.context = context;
}
public Resolution mail() throws IOException {
Session session;
JSONObject response = new JSONObject();
response.put("result", false);
try {
Context ctx = new InitialContext();
session = (Session)ctx.lookup("java:comp/env/mail/session");
} catch(Exception e) {
log.error("Mail session not configured correctly, exception looking up JNDI resource", e);
response.put("error", "Server not configured correctly to send mail");
return new StreamingResolution("application/json", response.toString());
}
String mail, to, from, subject;
try {
String template = Cfg.getSetting("support_mail_template");
if(template == null) {
template = FileUtils.readFileToString(new File(context.getServletContext().getRealPath("/WEB-INF/mail.txt")));
}
final Map<String,String[]> parameters = new HashMap(context.getRequest().getParameterMap());
String replace = Cfg.getSetting("support_mail_replace_search");
String replacement = Cfg.getSetting("support_mail_replacement");
if(replace != null && replacement != null && parameters.containsKey("permalink")) {
String permalink = parameters.get("permalink")[0];
permalink = Pattern.compile(replace).matcher(permalink).replaceAll(replacement);
parameters.put("permalink", new String[] { permalink });
}
StrSubstitutor request = new StrSubstitutor(new StrLookup<String>() {
@Override
public String lookup(String key) {
String[] value = parameters.get(key);
if(value == null || value.length == 0) {
return "";
} else {
return value[0];
}
}
});
mail = request.replace(template);
to = Cfg.getSetting("support_mail_to");
from = Cfg.getSetting("support_mail_from");
subject = Cfg.getSetting("support_mail_subject");
if(to == null || from == null || subject == null) {
log.error("Missing safetymaps.settings keys for either support_mail_to, support_mail_from or support_mail_subject");
response.put("error", "Server configuration error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
subject = request.replace(subject);
log.debug("Sending formatted mail to: " + to + ", subject: " + subject + ", body: " + mail);
log.info("Sending mail to " + to + ", received request from " + context.getRequest().getRemoteAddr());
} catch(Exception e) {
log.error("Error formatting mail", e);
response.put("error", "Server error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
try {
MimeMessage msg = new MimeMessage(session);
msg.setFrom(from);
msg.addRecipient(RecipientType.TO, new InternetAddress(to));
msg.setSubject(subject);
msg.setSentDate(new Date());
msg.setContent(mail, "text/plain");
Transport.send(msg);
} catch(Exception e) {
log.error("Error formatting mail", e);
response.put("error", "Server error formatting mail");
return new StreamingResolution("application/json", response.toString());
}
response.put("result", true);
return new StreamingResolution("application/json", response.toString());
}
}
| send cc to sender
| src/main/java/nl/opengeogroep/safetymaps/server/admin/stripes/MailActionBean.java | send cc to sender | <ide><path>rc/main/java/nl/opengeogroep/safetymaps/server/admin/stripes/MailActionBean.java
<ide> MimeMessage msg = new MimeMessage(session);
<ide> msg.setFrom(from);
<ide> msg.addRecipient(RecipientType.TO, new InternetAddress(to));
<add> String sender = context.getRequest().getParameter("email");
<add> if(sender != null) {
<add> msg.addRecipient(RecipientType.CC, new InternetAddress(sender));
<add> }
<ide> msg.setSubject(subject);
<ide> msg.setSentDate(new Date());
<ide> msg.setContent(mail, "text/plain"); |
|
Java | apache-2.0 | 301497f8e774e00997067a461d81802623a25fae | 0 | dotta/async-http-client,Aulust/async-http-client,Aulust/async-http-client | /*
* Copyright (c) 2012 Sonatype, Inc. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient;
import static org.asynchttpclient.Dsl.*;
import static org.testng.Assert.*;
import java.io.IOException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.asynchttpclient.filter.FilterContext;
import org.asynchttpclient.filter.FilterException;
import org.asynchttpclient.filter.ResponseFilter;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.testng.annotations.Test;
public class PostRedirectGetTest extends AbstractBasicTest {
// ------------------------------------------------------ Test Configuration
@Override
public AbstractHandler configureHandler() throws Exception {
return new PostRedirectGetHandler();
}
// ------------------------------------------------------------ Test Methods
@Test(groups = { "standalone", "post_redirect_get" })
public void postRedirectGet302Test() throws Exception {
doTestPositive(302);
}
@Test(groups = { "standalone", "post_redirect_get" })
public void postRedirectGet302StrictTest() throws Exception {
doTestNegative(302, true);
}
@Test(groups = { "standalone", "post_redirect_get" })
public void postRedirectGet303Test() throws Exception {
doTestPositive(303);
}
@Test(groups = { "standalone", "post_redirect_get" })
public void postRedirectGet301Test() throws Exception {
doTestPositive(301);
}
@Test(groups = { "standalone", "post_redirect_get" })
public void postRedirectGet307Test() throws Exception {
doTestNegative(307, false);
}
// --------------------------------------------------------- Private Methods
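    // Both helpers POST to the test server, which replies with the redirect status taken from the
    // "x-redirect" header; the response filter then tags the follow-up request so the handler can
    // assert that it arrives as a POST (negative cases) or as a GET (positive cases).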
private void doTestNegative(final int status, boolean strict) throws Exception {
ResponseFilter responseFilter = new ResponseFilter() {
@Override
public <T> FilterContext<T> filter(FilterContext<T> ctx) throws FilterException {
                    // pass on the x-expect-post and remove the x-redirect
// headers if found in the response
ctx.getResponseHeaders().getHeaders().get("x-expect-post");
ctx.getRequest().getHeaders().add("x-expect-post", "true");
ctx.getRequest().getHeaders().remove("x-redirect");
return ctx;
}
};
try (AsyncHttpClient p = asyncHttpClient(config().setFollowRedirect(true).setStrict302Handling(strict).addResponseFilter(responseFilter))) {
Request request = new RequestBuilder("POST").setUrl(getTargetUrl()).addFormParam("q", "a b").addHeader("x-redirect", +status + "@" + "http://localhost:" + port1 + "/foo/bar/baz").addHeader("x-negative", "true").build();
Future<Integer> responseFuture = p.executeRequest(request, new AsyncCompletionHandler<Integer>() {
@Override
public Integer onCompleted(Response response) throws Exception {
return response.getStatusCode();
}
@Override
public void onThrowable(Throwable t) {
t.printStackTrace();
fail("Unexpected exception: " + t.getMessage(), t);
}
});
int statusCode = responseFuture.get();
assertEquals(statusCode, 200);
}
}
private void doTestPositive(final int status) throws Exception {
ResponseFilter responseFilter = new ResponseFilter() {
@Override
public <T> FilterContext<T> filter(FilterContext<T> ctx) throws FilterException {
// pass on the x-expect-get and remove the x-redirect
// headers if found in the response
ctx.getResponseHeaders().getHeaders().get("x-expect-get");
ctx.getRequest().getHeaders().add("x-expect-get", "true");
ctx.getRequest().getHeaders().remove("x-redirect");
return ctx;
}
};
try (AsyncHttpClient p = asyncHttpClient(config().setFollowRedirect(true).addResponseFilter(responseFilter))) {
Request request = new RequestBuilder("POST").setUrl(getTargetUrl()).addFormParam("q", "a b").addHeader("x-redirect", +status + "@" + "http://localhost:" + port1 + "/foo/bar/baz").build();
Future<Integer> responseFuture = p.executeRequest(request, new AsyncCompletionHandler<Integer>() {
@Override
public Integer onCompleted(Response response) throws Exception {
return response.getStatusCode();
}
@Override
public void onThrowable(Throwable t) {
t.printStackTrace();
fail("Unexpected exception: " + t.getMessage(), t);
}
});
int statusCode = responseFuture.get();
assertEquals(statusCode, 200);
}
}
// ---------------------------------------------------------- Nested Classes
public static class PostRedirectGetHandler extends AbstractHandler {
final AtomicInteger counter = new AtomicInteger();
@Override
public void handle(String pathInContext, org.eclipse.jetty.server.Request request, HttpServletRequest httpRequest, HttpServletResponse httpResponse) throws IOException, ServletException {
final boolean expectGet = (httpRequest.getHeader("x-expect-get") != null);
final boolean expectPost = (httpRequest.getHeader("x-expect-post") != null);
if (expectGet) {
final String method = request.getMethod();
if (!"GET".equals(method)) {
httpResponse.sendError(500, "Incorrect method. Expected GET, received " + method);
return;
}
httpResponse.setStatus(200);
httpResponse.getOutputStream().write("OK".getBytes());
httpResponse.getOutputStream().flush();
return;
} else if (expectPost) {
final String method = request.getMethod();
if (!"POST".equals(method)) {
httpResponse.sendError(500, "Incorrect method. Expected POST, received " + method);
return;
}
httpResponse.setStatus(200);
httpResponse.getOutputStream().write("OK".getBytes());
httpResponse.getOutputStream().flush();
return;
}
String header = httpRequest.getHeader("x-redirect");
if (header != null) {
                // format for header is <status code>@<location url>
String[] parts = header.split("@");
int redirectCode;
try {
redirectCode = Integer.parseInt(parts[0]);
} catch (Exception ex) {
ex.printStackTrace();
httpResponse.sendError(500, "Unable to parse redirect code");
return;
}
httpResponse.setStatus(redirectCode);
if (httpRequest.getHeader("x-negative") == null) {
httpResponse.addHeader("x-expect-get", "true");
} else {
httpResponse.addHeader("x-expect-post", "true");
}
httpResponse.setContentLength(0);
httpResponse.addHeader("Location", parts[1] + counter.getAndIncrement());
httpResponse.getOutputStream().flush();
return;
}
httpResponse.sendError(500);
httpResponse.getOutputStream().flush();
httpResponse.getOutputStream().close();
}
}
}
| client/src/test/java/org/asynchttpclient/PostRedirectGetTest.java | /*
* Copyright (c) 2012 Sonatype, Inc. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient;
import static org.asynchttpclient.Dsl.*;
import static org.testng.Assert.*;
import java.io.IOException;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.asynchttpclient.filter.FilterContext;
import org.asynchttpclient.filter.FilterException;
import org.asynchttpclient.filter.ResponseFilter;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.testng.annotations.Test;
public class PostRedirectGetTest extends AbstractBasicTest {
// ------------------------------------------------------ Test Configuration
@Override
public AbstractHandler configureHandler() throws Exception {
return new PostRedirectGetHandler();
}
// ------------------------------------------------------------ Test Methods
@Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
public void postRedirectGet302Test() throws Exception {
doTestPositive(302);
}
@Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
public void postRedirectGet302StrictTest() throws Exception {
doTestNegative(302, true);
}
@Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
public void postRedirectGet303Test() throws Exception {
doTestPositive(303);
}
@Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
public void postRedirectGet301Test() throws Exception {
doTestNegative(301, false);
}
@Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
public void postRedirectGet307Test() throws Exception {
doTestNegative(307, false);
}
// --------------------------------------------------------- Private Methods
private void doTestNegative(final int status, boolean strict) throws Exception {
ResponseFilter responseFilter = new ResponseFilter() {
@Override
public <T> FilterContext<T> filter(FilterContext<T> ctx) throws FilterException {
                    // pass on the x-expect-post and remove the x-redirect
// headers if found in the response
ctx.getResponseHeaders().getHeaders().get("x-expect-post");
ctx.getRequest().getHeaders().add("x-expect-post", "true");
ctx.getRequest().getHeaders().remove("x-redirect");
return ctx;
}
};
try (AsyncHttpClient p = asyncHttpClient(config().setFollowRedirect(true).setStrict302Handling(strict).addResponseFilter(responseFilter))) {
Request request = new RequestBuilder("POST").setUrl(getTargetUrl()).addFormParam("q", "a b").addHeader("x-redirect", +status + "@" + "http://localhost:" + port1 + "/foo/bar/baz").addHeader("x-negative", "true").build();
Future<Integer> responseFuture = p.executeRequest(request, new AsyncCompletionHandler<Integer>() {
@Override
public Integer onCompleted(Response response) throws Exception {
return response.getStatusCode();
}
@Override
public void onThrowable(Throwable t) {
t.printStackTrace();
fail("Unexpected exception: " + t.getMessage(), t);
}
});
int statusCode = responseFuture.get();
assertEquals(statusCode, 200);
}
}
private void doTestPositive(final int status) throws Exception {
ResponseFilter responseFilter = new ResponseFilter() {
@Override
public <T> FilterContext<T> filter(FilterContext<T> ctx) throws FilterException {
// pass on the x-expect-get and remove the x-redirect
// headers if found in the response
ctx.getResponseHeaders().getHeaders().get("x-expect-get");
ctx.getRequest().getHeaders().add("x-expect-get", "true");
ctx.getRequest().getHeaders().remove("x-redirect");
return ctx;
}
};
try (AsyncHttpClient p = asyncHttpClient(config().setFollowRedirect(true).addResponseFilter(responseFilter))) {
Request request = new RequestBuilder("POST").setUrl(getTargetUrl()).addFormParam("q", "a b").addHeader("x-redirect", +status + "@" + "http://localhost:" + port1 + "/foo/bar/baz").build();
Future<Integer> responseFuture = p.executeRequest(request, new AsyncCompletionHandler<Integer>() {
@Override
public Integer onCompleted(Response response) throws Exception {
return response.getStatusCode();
}
@Override
public void onThrowable(Throwable t) {
t.printStackTrace();
fail("Unexpected exception: " + t.getMessage(), t);
}
});
int statusCode = responseFuture.get();
assertEquals(statusCode, 200);
}
}
// ---------------------------------------------------------- Nested Classes
public static class PostRedirectGetHandler extends AbstractHandler {
final AtomicInteger counter = new AtomicInteger();
@Override
public void handle(String pathInContext, org.eclipse.jetty.server.Request request, HttpServletRequest httpRequest, HttpServletResponse httpResponse) throws IOException, ServletException {
final boolean expectGet = (httpRequest.getHeader("x-expect-get") != null);
final boolean expectPost = (httpRequest.getHeader("x-expect-post") != null);
if (expectGet) {
final String method = request.getMethod();
if (!"GET".equals(method)) {
httpResponse.sendError(500, "Incorrect method. Expected GET, received " + method);
return;
}
httpResponse.setStatus(200);
httpResponse.getOutputStream().write("OK".getBytes());
httpResponse.getOutputStream().flush();
return;
} else if (expectPost) {
final String method = request.getMethod();
if (!"POST".equals(method)) {
httpResponse.sendError(500, "Incorrect method. Expected POST, received " + method);
return;
}
httpResponse.setStatus(200);
httpResponse.getOutputStream().write("OK".getBytes());
httpResponse.getOutputStream().flush();
return;
}
String header = httpRequest.getHeader("x-redirect");
if (header != null) {
                // format for header is <status code>@<location url>
String[] parts = header.split("@");
int redirectCode;
try {
redirectCode = Integer.parseInt(parts[0]);
} catch (Exception ex) {
ex.printStackTrace();
httpResponse.sendError(500, "Unable to parse redirect code");
return;
}
httpResponse.setStatus(redirectCode);
if (httpRequest.getHeader("x-negative") == null) {
httpResponse.addHeader("x-expect-get", "true");
} else {
httpResponse.addHeader("x-expect-post", "true");
}
httpResponse.setContentLength(0);
httpResponse.addHeader("Location", parts[1] + counter.getAndIncrement());
httpResponse.getOutputStream().flush();
return;
}
httpResponse.sendError(500);
httpResponse.getOutputStream().flush();
httpResponse.getOutputStream().close();
}
}
}
| Fix PostRedirectGetTest, close #761
| client/src/test/java/org/asynchttpclient/PostRedirectGetTest.java | Fix PostRedirectGetTest, close #761 | <ide><path>lient/src/test/java/org/asynchttpclient/PostRedirectGetTest.java
<ide>
<ide> // ------------------------------------------------------------ Test Methods
<ide>
<del> @Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
<add> @Test(groups = { "standalone", "post_redirect_get" })
<ide> public void postRedirectGet302Test() throws Exception {
<ide> doTestPositive(302);
<ide> }
<ide>
<del> @Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
<add> @Test(groups = { "standalone", "post_redirect_get" })
<ide> public void postRedirectGet302StrictTest() throws Exception {
<ide> doTestNegative(302, true);
<ide> }
<ide>
<del> @Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
<add> @Test(groups = { "standalone", "post_redirect_get" })
<ide> public void postRedirectGet303Test() throws Exception {
<ide> doTestPositive(303);
<ide> }
<ide>
<del> @Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
<add> @Test(groups = { "standalone", "post_redirect_get" })
<ide> public void postRedirectGet301Test() throws Exception {
<del> doTestNegative(301, false);
<del> }
<del>
<del> @Test(groups = { "standalone", "post_redirect_get" }, enabled = false)
<add> doTestPositive(301);
<add> }
<add>
<add> @Test(groups = { "standalone", "post_redirect_get" })
<ide> public void postRedirectGet307Test() throws Exception {
<ide> doTestNegative(307, false);
<ide> } |
|
JavaScript | mit | 5e455c146e8ee68630305ca17c43dbf8600f760d | 0 | AndrewCMartin/idb,AndrewCMartin/idb,AndrewCMartin/idb | import React, {PropTypes} from 'react'
import {Link} from 'react-router-dom'
import {DropdownButton, MenuItem, Pagination} from 'react-bootstrap'
var axios = require('axios');
var imageStyles = {
height: '500px',
}
function splitarray(input, spacing)
{
var output = [];
for (var i = 0; i < input.length; i += spacing)
{
output[output.length] = input.slice(i, i + spacing);
}
return output;
}
class Actors extends React.Component {
constructor(props) {
super(props);
this.handleSelectSort = this.handleSelectSort.bind(this);
this.handleSelectDirection = this.handleSelectDirection.bind(this);
this.handleSelect = this.handleSelect.bind(this);
this.updateItems = this.updateItems.bind(this);
this.state = this.getInitialState();
this.updateItems();
}
getInitialState() {
return {
actors: [],
actorsGrouped: [],
numPages: 1,
activePage: 1,
resultsPerPage: 6,
orderBy: 'name',
orderDirection: 'asc',
q: {'order_by': [{"field": "name", "direction": "asc"}]}
};
}
// componentDidMount() {
// this.updateItems();
// }
updateItems() {
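        // Fetch the current page of actors from the API and regroup the results into rows of three for rendering.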
axios.get('http://marvelus.me/api/actor', {
params: {
results_per_page: this.state.resultsPerPage,
page: this.state.activePage,
q: JSON.stringify(this.state.q),
}
}).then(res => {
this.state.numPages = res.data.total_pages;
const actors = res.data.objects.map(actor => actor);
//this.setState({actors});
const actorsGrouped = splitarray(actors, 3)
this.setState({actorsGrouped});
console.log(this.state.actorsGrouped);
});
}
handleSelect(eventKey) {
this.state.activePage = eventKey;
this.updateItems();
}
handleSelectSort(eventKey) {
// this.state.orderBy = eventKey;
this.state.q.order_by[0].field = eventKey;
this.updateItems()
}
handleSelectDirection(eventKey) {
// this.state.orderDirection = eventKey;
this.state.q.order_by[0].direction = eventKey;
this.updateItems();
}
renderDropdownButtonSortby(title, i) {
return (
<DropdownButton bsStyle="primary" title={title} key={"name"} id={'dropdown-basic-${i}'}
onSelect={this.handleSelectSort}>
<MenuItem eventKey="name">Name</MenuItem>
<MenuItem eventKey="birthday">Birthday</MenuItem>
</DropdownButton>
);
}
renderDropdownButtonSortDirection(title, i) {
return (
<DropdownButton bsStyle="primary" title={title} onSelect={this.handleSelectDirection}>
<MenuItem eventKey="asc">Ascending</MenuItem>
<MenuItem eventKey="desc">Descending</MenuItem>
</DropdownButton>
);
}
render() {
return (
<div className="container" styles="margin-top:100px;">
<div className="row">
<div className='text-center'>
{this.renderDropdownButtonSortby("Sort By: ", "name")}
{this.renderDropdownButtonSortDirection("Order", "")}
</div>
</div>
{this.state.actorsGrouped.length > 0 ?
<div className="row">
{this.state.actorsGrouped.map(actor =>
<div className="col-sm-4">
<div className="panel panel-info">
<div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
className="img-responsive" style={imageStyles}
alt="Image"/></div>
                                </div>
                            </div>
                        )}
</div>
:null}
{this.state.actorsGrouped[0]?
this.state.actorsGrouped[0].map(actor =>
<div className="col-sm-4">
<div className="panel panel-info">
<div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
className="img-responsive" style={imageStyles}
alt="Image"/></div>
</div>
</div>
):null}
<div className="row">
{this.state.actorsGrouped[1]?
this.state.actorsGrouped[1].map(actor =>
<div className="col-sm-4">
<div className="panel panel-info">
<div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
className="img-responsive" style={imageStyles}
alt="Image"/></div>
</div>
</div>
):null}
<div className='text-center'>
{!this.state.numPages
? null
: <Pagination
bsSize='large'
prev
next
first
last
ellipsis
boundaryLinks
items={this.state.numPages}
maxButtons={10}
activePage={this.state.activePage}
onSelect={this.handleSelect}/>
}
</div>
</div>
</div>
</div>
);
}
}
export default Actors
| src/Actors.js | import React, {PropTypes} from 'react'
import {Link} from 'react-router-dom'
import {DropdownButton, MenuItem, Pagination} from 'react-bootstrap'
var axios = require('axios');
var imageStyles = {
height: '500px',
}
function splitarray(input, spacing)
{
var output = [];
for (var i = 0; i < input.length; i += spacing)
{
output[output.length] = input.slice(i, i + spacing);
}
return output;
}
class Actors extends React.Component {
constructor(props) {
super(props);
this.state = {
actors: [],
actorsGrouped: [],
numPages: 1,
activePage: 1,
resultsPerPage: 6,
orderBy: 'name',
orderDirection: 'asc',
q: {'order_by': [{"field": "name", "direction": "asc"}]}
}
this.handleSelectSort = this.handleSelectSort.bind(this);
this.handleSelectDirection = this.handleSelectDirection.bind(this);
this.handleSelect = this.handleSelect.bind(this);
this.updateItems = this.updateItems.bind(this);
}
componentDidMount() {
this.updateItems();
}
updateItems() {
axios.get('http://marvelus.me/api/actor', {
params: {
results_per_page: this.state.resultsPerPage,
page: this.state.activePage,
q: JSON.stringify(this.state.q),
}
}).then(res => {
this.state.numPages = res.data.total_pages;
const actors = res.data.objects.map(actor => actor);
this.setState({actors});
const actorsGrouped = splitarray(actors, 3)
this.state.actorsGrouped = actorsGrouped;
console.log(this.state.actorsGrouped);
});
}
handleSelect(eventKey) {
this.setState({
activePage: eventKey,
});
this.updateItems();
}
handleSelectSort(eventKey) {
// this.state.orderBy = eventKey;
this.state.q.order_by[0].field = eventKey;
this.updateItems()
}
handleSelectDirection(eventKey) {
// this.state.orderDirection = eventKey;
this.state.q.order_by[0].direction = eventKey;
this.updateItems();
}
renderDropdownButtonSortby(title, i) {
return (
<DropdownButton bsStyle="primary" title={title} key={"name"} id={'dropdown-basic-${i}'}
onSelect={this.handleSelectSort}>
<MenuItem eventKey="name">Name</MenuItem>
<MenuItem eventKey="birthday">Birthday</MenuItem>
</DropdownButton>
);
}
renderDropdownButtonSortDirection(title, i) {
return (
<DropdownButton bsStyle="primary" title={title} onSelect={this.handleSelectDirection}>
<MenuItem eventKey="asc">Ascending</MenuItem>
<MenuItem eventKey="desc">Descending</MenuItem>
</DropdownButton>
);
}
render() {
return (
<div className="container" styles="margin-top:100px;">
<div className="row">
<div className='text-center'>
{this.renderDropdownButtonSortby("Sort By: ", "name")}
{this.renderDropdownButtonSortDirection("Order", "")}
</div>
</div>
<div className="row">
{this.state.actorsGrouped[0]?
this.state.actorsGrouped[0].map(actor =>
<div className="col-sm-4">
<div className="panel panel-info">
<div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
className="img-responsive" style={imageStyles}
alt="Image"/></div>
</div>
</div>
):null}
<div className="row">
{this.state.actorsGrouped[1]?
this.state.actorsGrouped[1].map(actor =>
<div className="col-sm-4">
<div className="panel panel-info">
<div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
className="img-responsive" style={imageStyles}
alt="Image"/></div>
</div>
</div>
):null}
<div className='text-center'>
{!this.state.numPages
? null
: <Pagination
bsSize='large'
prev
next
first
last
ellipsis
boundaryLinks
items={this.state.numPages}
maxButtons={10}
activePage={this.state.activePage}
onSelect={this.handleSelect}/>
}
</div>
</div>
</div>
</div>
);
}
}
export default Actors
| fixed initial click
| src/Actors.js | fixed initial click | <ide><path>rc/Actors.js
<ide> class Actors extends React.Component {
<ide> constructor(props) {
<ide> super(props);
<del> this.state = {
<add> this.handleSelectSort = this.handleSelectSort.bind(this);
<add> this.handleSelectDirection = this.handleSelectDirection.bind(this);
<add> this.handleSelect = this.handleSelect.bind(this);
<add> this.updateItems = this.updateItems.bind(this);
<add>
<add> this.state = this.getInitialState();
<add> this.updateItems();
<add> }
<add>
<add> getInitialState() {
<add> return {
<ide> actors: [],
<ide> actorsGrouped: [],
<ide> numPages: 1,
<ide> orderBy: 'name',
<ide> orderDirection: 'asc',
<ide> q: {'order_by': [{"field": "name", "direction": "asc"}]}
<del> }
<del> this.handleSelectSort = this.handleSelectSort.bind(this);
<del> this.handleSelectDirection = this.handleSelectDirection.bind(this);
<del> this.handleSelect = this.handleSelect.bind(this);
<del> this.updateItems = this.updateItems.bind(this);
<add> };
<ide> }
<ide>
<del>
<del> componentDidMount() {
<del> this.updateItems();
<del> }
<add> // componentDidMount() {
<add> // this.updateItems();
<add> // }
<ide>
<ide> updateItems() {
<ide> axios.get('http://marvelus.me/api/actor', {
<ide> }).then(res => {
<ide> this.state.numPages = res.data.total_pages;
<ide> const actors = res.data.objects.map(actor => actor);
<del> this.setState({actors});
<add> //this.setState({actors});
<ide> const actorsGrouped = splitarray(actors, 3)
<del> this.state.actorsGrouped = actorsGrouped;
<add> this.setState({actorsGrouped});
<ide> console.log(this.state.actorsGrouped);
<ide> });
<ide> }
<ide>
<ide> handleSelect(eventKey) {
<ide>
<del> this.setState({
<del> activePage: eventKey,
<del> });
<add> this.state.activePage = eventKey;
<ide> this.updateItems();
<del>
<add>
<ide> }
<ide>
<ide> handleSelectSort(eventKey) {
<ide>
<ide>
<ide> render() {
<del>
<ide> return (
<ide>
<ide> <div className="container" styles="margin-top:100px;">
<ide> {this.renderDropdownButtonSortDirection("Order", "")}
<ide> </div>
<ide> </div>
<del> <div className="row">
<add>
<add> {this.state.actorsGrouped.length > 0 ?
<add> <div className="row">
<add> {this.state.actorsGrouped.map(actor =>
<add> <div className="col-sm-4">
<add> <div className="panel panel-info">
<add> <div className="panel-heading"><Link to={"/actor/" + actor.id}>{actor.name}</Link></div>
<add> <div className="panel-body"><img src={"https://image.tmdb.org/t/p/w640/" + actor.image}
<add> className="img-responsive" style={imageStyles}
<add> alt="Image"/></div>
<add> </div>
<add>
<add> )}
<add> </div>
<add> :null}
<ide> {this.state.actorsGrouped[0]?
<ide> this.state.actorsGrouped[0].map(actor =>
<ide> |
|
JavaScript | apache-2.0 | 196316c3d45fd24d62c098efd83057eea6a0e0b1 | 0 | mwaylabs/uikit,mwaylabs/uikit,mwaylabs/uikit | 'use strict';
angular.module('mwComponents', [])
/**
* @ngdoc directive
* @name mwComponents.directive:mwPanel
* @element div
* @description
*
 * Wrapper directive for {@link http://getbootstrap.com/components/#panels Bootstrap's Panel}.
*
* @param {string} mwPanel Panel title
* @example
* <doc:example>
* <doc:source>
* <div mw-panel="Panel title">
* Panel content
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwPanel', function () {
return {
restrict: 'A',
replace: true,
require: '^?dashboardModule',
scope: {
title: '@mwPanel'
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwPanel.html',
link: function (scope, elm, attr, ctrl) {
if (ctrl) {
scope.isDashboardModule = true;
scope.showCloseButton = ctrl.numberOfModules > 1;
scope.closeModule = ctrl.closeModule;
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwSortIndicator
* @element span
* @description
*
 * Displays a sort indicator: an up arrow when the sort is active and not reversed, a down arrow when it is reversed.
*
 * @param {boolean} isActive displays a single up or down arrow when true, otherwise a neutral up-and-down arrow
 * @param {boolean} isReversed displays a down arrow when true, otherwise an up arrow
* @example
* <doc:example>
* <doc:source>
* <div mw-sort-indicator is-active="true" is-reversed="false"></div>
* </doc:source>
* </doc:example>
*/
.directive('mwSortIndicator', function () {
return {
restrict: 'A',
replace: true,
scope: {
isActive: '=',
isReversed: '='
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwSortIndicator.html'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwAlert
* @element div
* @description
*
 * Wrapper directive for {@link http://getbootstrap.com/components/#alerts Bootstrap's Alert}.
*
* @param {string} mwAlert Alert type. Can be one of the following:
*
* - warning
* - danger
* - success
* - info
*
* @example
* <doc:example>
* <doc:source>
* <div mw-alert="warning">
* Alert content
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwAlert', function () {
return {
restrict: 'A',
replace: true,
scope: {
type: '@mwAlert'
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwAlert.html'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwHeader
* @element div
* @description
*
* Header bar for content pages.
*
* @param {string} title Header title
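 * @param {string} url Optional path that the back button navigates to; without it the browser history is used
 * @param {string} showBackButton When set, a back button is rendered in the header
 * @param {string} warningText Optional warning text displayed as a popover in the header
 * @param {expression} warningCondition Condition associated with the warning text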
*
* @example
* <doc:example>
* <doc:source>
* <div mw-header title="A nice page">
* Header content, Buttons etc...
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwHeader', function ($location, $route, $rootScope) {
return {
transclude: true,
scope: {
title: '@',
url: '@',
showBackButton: '@',
warningText: '@',
warningCondition: '='
},
templateUrl: 'modules/ui/templates/mwComponents/mwHeader.html',
link: function (scope, el, attrs, ctrl, $transclude) {
$rootScope.siteTitleDetails = scope.title;
$transclude(function (clone) {
if ((!clone || clone.length === 0) && !scope.showBackButton) {
el.find('.navbar-header').addClass('no-buttons');
}
});
scope.refresh = function () {
$route.reload();
};
scope.back = function () {
if (attrs.url) {
$location.path(attrs.url);
} else {
window.history.back();
}
};
if (scope.warningText) {
el.find('.header-popover').popover({
trigger: 'hover',
placement: 'bottom',
container: el.find('.popover-container')
});
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwIcon
* @element span
* @description
*
* Wrapper for bootstrap glyphicons.
*
* @param {string} mwIcon Glyphicon class suffix. Example suffix for 'glyphicon glyphicon-search' is 'search'
* @param {string} tooltip Optional string which will be displayed as a tooltip when hovering over the icon
*
* @example
* <doc:example>
* <doc:source>
* <span mw-icon="search"></span>
* <span mw-icon="search" tooltip="This is a tooltip"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwIcon', function ($compile) {
return {
restrict: 'A',
replace: true,
scope: {
mwIcon: '@',
placement: '@'
},
template: function (elm, attr) {
        // a leading "fa-" prefix means a Font Awesome icon, otherwise fall back to a Bootstrap glyphicon
        var isFontAwesome = angular.isArray(attr.mwIcon.match(/^fa-/));
        if (isFontAwesome) {
return '<i class="fa {{mwIcon}}"></i>';
} else {
return '<span class="glyphicon glyphicon-{{mwIcon}}"></span>';
}
},
link: function (scope, el, attr) {
if (attr.tooltip) {
el.popover({
trigger: 'hover',
placement: 'bottom',
content: attr.tooltip,
container: 'body'
});
attr.$observe('tooltip', function (newVal) {
el.popover('destroy');
el.popover({
trigger: 'hover',
placement: scope.placement || 'bottom',
content: newVal,
container: 'body'
});
});
}
if(!scope.mwIcon){
scope.$watch('mwIcon', function(newVal){
if(newVal){
var template,
              isFontAwesome = angular.isArray(scope.mwIcon.match(/^fa-/));
            if (isFontAwesome) {
template = '<i class="fa {{mwIcon}}"></i>';
} else {
template = '<span class="glyphicon glyphicon-{{mwIcon}}"></span>';
}
el.replaceWith($compile(template)(scope));
}
});
}
}
};
})
/**
* @ngdoc directive
* @name Relution.Common.directive:rlnTooltip
* @element span
*
* @description
* Creates a tooltip element using Bootstraps popover component.
*
* @param {String} mwTooltip Content of the tooltip
*
* @example
<span mw-tooltip="foobar"></span>
*/
.directive('mwTooltip', function () {
return {
restrict: 'A',
scope: {
text: '@mwTooltip',
placement: '@'
},
replace: true,
template: '<span class="mw-tooltip"><span mw-icon="question-sign" tooltip="{{ text }}" placement="{{ placement }}"></span></span>',
compile: function (elm, attr) {
if (attr.mwTooltipIcon) {
elm.find('span').attr('mw-icon', attr.mwTooltipIcon);
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwBadge
* @element span
* @description
*
* Wrapper for bootstrap labels.
*
 * @param {string} mwBadge label class suffix. Example: the suffix for 'label label-info' is 'info'
*
* @example
* <doc:example>
* <doc:source>
* <span mw-badge="info"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwBadge', function () {
return {
restrict: 'A',
replace: true,
scope: { mwBadge: '@' },
transclude: true,
template: '<span class="mw-badge label label-{{mwBadge}}" ng-transclude></span>'
};
})
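  /**
   * @ngdoc directive
   * @name mwComponents.directive:mwEmptyState
   * @element div
   * @description
   *
   * Placeholder shown when a list or page has no content yet. Renders the grey logo with the
   * transcluded text as a lead line.
   *
   * @example
   * <doc:example>
   * <doc:source>
   * <div mw-empty-state>No items have been created yet</div>
   * </doc:source>
   * </doc:example>
   */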
.directive('mwEmptyState', function () {
return {
restrict: 'A',
replace: true,
scope: { mwBadge: '@' },
transclude: true,
template: '<div class="mw-empty-state"> <img src="images/logo-grey.png"><h2 ng-transclude class="lead"></h2> </div>'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwFilterableSearch
* @element div
* @description
*
 * Creates a search field for filtering a collection in the sidebar. The search fires on the enter key and,
 * on non-mobile devices, is throttled while the user types.
*
* @param {filterable} filterable Filterable instance.
 * @param {expression} mwDisabled If the expression evaluates to true, the input is disabled.
* @param {string} property The name of the property on which the filtering should happen.
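 *
 * @example
 * <doc:example>
 * <doc:source>
 * <!-- sketch only: assumes the surrounding controller exposes a Filterable instance as "devices.filterable" with a "freeText" property -->
 * <div mw-filterable-search filterable="devices.filterable" property="freeText" mw-disabled="devices.loading"></div>
 * </doc:source>
 * </doc:example>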
*/
.directive('mwFilterableSearch', function ($timeout, Loading, Detect) {
return {
transclude: true,
scope: {
filterable: '=',
mwDisabled: '=',
property: '@'
// loading: '='
},
templateUrl: 'modules/ui/templates/mwComponents/mwFilterableSearch.html',
link: function (scope) {
scope.model = scope.filterable.properties[scope.property];
scope.inputLength = 0;
scope.isMobile = Detect.isMobile();
var timeout;
var search = function () {
return scope.filterable.applyFilters();
};
var throttler = function () {
scope.searching = true;
$timeout.cancel(timeout);
timeout = $timeout(function () {
search().then(function () {
$timeout.cancel(timeout);
scope.searching = false;
}, function () {
scope.searching = false;
});
}, 500);
};
scope.search = function (event) {
if (!event || event.keyCode === 13) {
search();
} else {
if (!scope.isMobile) {
throttler();
}
}
};
scope.reset = function () {
scope.model.value = '';
search();
};
// Loading.registerDoneCallback(function(){
// scope.loading = false;
// });
//
// scope.loading = Loading.isLoading();
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwRating
* @element span
* @description
*
* Converts a rating number into stars
*
* @param {number | expression} mwRating rating score
 * @param {number} max the maximum number of stars
*
* @example
* <doc:example>
* <doc:source>
* <span mw-rating="3"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwRating', function () {
return {
restrict: 'A',
scope: true,
template: '<i ng-repeat="star in stars" ng-class="star.state" class="fa"></i>',
link: function (scope, elm, attr) {
elm.addClass('mw-star-rating');
scope.stars = [];
var starsMax = scope.$eval(attr.max);
var buildStars = function (rating) {
scope.stars = [];
rating = (rating > starsMax) ? starsMax : rating;
rating = (rating < 0) ? 0 : rating;
for (var i = 0; i < Math.floor(rating); i++) {
scope.stars.push({state: 'fa-star'});
}
if (rating - Math.floor(rating) >= 0.5) {
scope.stars.push({state: 'fa-star-half-full'});
}
while (attr.max && scope.stars.length < starsMax) {
scope.stars.push({state: 'fa-star-o'});
}
};
attr.$observe('mwRating', function (value) {
buildStars(scope.$eval(value));
});
}
};
})
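  /**
   * @ngdoc directive
   * @name mwComponents.directive:mwButtonHelp
   * @element button
   * @description
   *
   * Adds a small help icon to a button. On hover it shows a popover listing the hints of all
   * registered mw-button-help-condition directives whose condition currently evaluates to true;
   * the icon is hidden while no hint applies. Typically used to explain why a button is disabled.
   *
   * @example
   * <doc:example>
   * <doc:source>
   * <!-- sketch only: "form.$invalid" stands in for any condition of the surrounding page -->
   * <button class="btn btn-primary" mw-button-help
   *         mw-button-help-condition="form.$invalid"
   *         mw-button-help-text="Please fill in all required fields">Save</button>
   * </doc:source>
   * </doc:example>
   */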
.directive('mwButtonHelp', function (i18n) {
return {
restrict: 'A',
scope: true,
link: function (scope, elm) {
var popup;
elm.addClass('mwButtonHelp');
var helpIcon = angular.element('<div>').addClass('help-icon glyphicon glyphicon-question-sign');
elm.prepend(helpIcon);
helpIcon.hover(function () {
buildPopup();
var targetOffset = angular.element(this).offset();
angular.element('body').append(popup);
popup.css('top', targetOffset.top - (popup.height() / 2) + 10 - angular.element(document).scrollTop());
popup.css('left', (targetOffset.left + 40));
}, function () {
angular.element('body > .mwButtonPopover').remove();
});
var buildPopup = function () {
popup = angular.element('<div>' + scope.helpText + '<ul></ul></div>').addClass('mwButtonPopover popover');
angular.forEach(scope.hintsToShow, function (hint) {
popup.find('ul').append('<li>' + hint.text + '</li>');
});
};
scope.$watch('hintsToShow', function (newVal) {
if (newVal.length) {
helpIcon.show();
} else {
helpIcon.hide();
}
});
scope.$on('$destroy', function () {
if (popup) {
popup.remove();
}
});
},
controller: function ($scope) {
$scope.registeredHints = [];
$scope.hintsToShow = [];
$scope.helpText = i18n.get('common.buttonHelp');
$scope.$on('i18n:localeChanged', function () {
$scope.helpText = i18n.get('common.buttonHelp');
});
var showHelp = function () {
$scope.hintsToShow = [];
angular.forEach($scope.registeredHints, function (registered) {
if (registered.condition) {
$scope.hintsToShow.push(registered);
}
});
};
//check if any condition changes
this.register = function (registered) {
$scope.$watch(function () {
return registered.condition;
}, showHelp);
$scope.registeredHints.push(registered);
};
}
};
})
.directive('mwButtonHelpCondition', function () {
return {
restrict: 'A',
require: '^mwButtonHelp',
scope: {
condition: '=mwButtonHelpCondition',
text: '@mwButtonHelpText'
},
link: function (scope, elm, attr, ctrl) {
ctrl.register(scope);
}
};
})
.directive('mwOptionGroup', function () {
return {
scope: {
title: '@',
description: '@',
mwDisabled: '='
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwOptionGroup.html',
link: function (scope, el) {
scope.randomId = _.uniqueId('option_group_');
el.find('input').attr('id', scope.randomId);
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwToggle
* @element span
* @description
*
* Displays a toggle button to toggle a boolean value
*
* @param {expression} mwModel model
* @param {function} mwChange the function which should be executed when the value has changed
*
*/
.directive('mwToggle', function ($timeout) {
return {
scope: {
mwModel: '=',
mwDisabled: '=',
mwChange: '&'
},
replace: true,
templateUrl: 'modules/ui/templates/mwComponents/mwToggle.html',
link: function (scope) {
scope.toggle = function (value) {
if (scope.mwModel !== value) {
scope.mwModel = !scope.mwModel;
$timeout(function () {
scope.mwChange({value: scope.mwModel});
});
}
};
}
};
})
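  /*
   * Illustrative usage sketch for mwToggle (not part of the original source;
   * the `settings.active` model, `isReadOnly` flag and `onActiveChange`
   * handler are assumptions made for the example):
   *
   *   <span mw-toggle
   *         mw-model="settings.active"
   *         mw-disabled="isReadOnly"
   *         mw-change="onActiveChange(value)"></span>
   *
   * The mwChange expression is evaluated with the new model value exposed as
   * `value`.
   */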
/**
* @ngdoc directive
* @name mwComponents.directive:mwTimeline
* @element div
* @description
*
   * Vertical timeline. This is the container element for timeline entries
*
*/
.directive('mwTimeline', function () {
return {
transclude: true,
replace: true,
template: '<div class="mw-timeline timeline clearfix"><hr class="vertical-line"><div class="content" ng-transclude></div></div>'
};
})
.directive('mwTimelineFieldset', function ($q) {
return {
scope: {
mwTitle: '@'
},
transclude: true,
replace: true,
templateUrl: 'modules/ui/templates/mwComponents/mwTimelineFieldset.html',
controller: function ($scope) {
$scope.entries = [];
this.register = function (entry) {
if (!_.findWhere($scope.entries, {$id: entry.$id})) {
$scope.entries.push(entry);
}
};
$scope.entriesVisible = true;
$scope.toggleEntries = function () {
var toggleEntryHideFns = [];
$scope.entries.forEach(function (entry) {
if ($scope.entriesVisible) {
toggleEntryHideFns.push(entry.hide());
} else {
toggleEntryHideFns.push(entry.show());
}
});
if (!$scope.entriesVisible) {
$scope.entriesVisible = !$scope.entriesVisible;
} else {
$q.all(toggleEntryHideFns).then(function () {
$scope.entriesVisible = !$scope.entriesVisible;
});
}
};
}
};
})
.directive('mwTimelineEntry', function ($q) {
return {
transclude: true,
replace: true,
template: '<li class="timeline-entry"><span class="bubble"></span><div ng-transclude></div></li>',
scope: true,
require: '^mwTimelineFieldset',
link: function (scope, el, attrs, mwTimelineFieldsetController) {
mwTimelineFieldsetController.register(scope);
scope.hide = function () {
var dfd = $q.defer();
el.fadeOut('slow', function () {
dfd.resolve();
});
return dfd.promise;
};
scope.show = function () {
var dfd = $q.defer();
el.fadeIn('slow', function () {
dfd.resolve();
});
return dfd.promise;
};
}
};
})
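  /*
   * Illustrative sketch of how the timeline directives nest (not part of the
   * original source; the host elements, title and entry texts are assumptions
   * made for the example):
   *
   *   <div mw-timeline>
   *     <div mw-timeline-fieldset mw-title="2016">
   *       <div mw-timeline-entry>First entry</div>
   *       <div mw-timeline-entry>Second entry</div>
   *     </div>
   *   </div>
   *
   * Each mwTimelineEntry registers itself with its parent mwTimelineFieldset,
   * whose toggleEntries() fades the registered entries in and out.
   */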
/**
* @ngdoc directive
* @name mwComponents.directive:mwDraggable
* @description
*
* Offers drag and drop functionality on any element. Data can be set with the mwDragData parameter.
* The drop callback of the mwDroppable element will receive this data.
*
*/
.directive('mwDraggable', function ($timeout) {
return {
restrict: 'A',
scope: {
mwDragData: '=',
//We can not use camelcase because *-start is a reserved word from angular!
mwDragstart: '&',
mwDragend: '&',
mwDropEffect: '@'
},
link: function (scope, el) {
el.attr('draggable', true);
el.addClass('draggable', true);
if (scope.mwDragstart) {
el.on('dragstart', function (event) {
event.originalEvent.dataTransfer.setData('text', JSON.stringify(scope.mwDragData));
event.originalEvent.dataTransfer.effectAllowed = scope.mwDropEffect;
$timeout(function () {
scope.mwDragstart({event: event, dragData: scope.mwDragData});
});
});
}
el.on('dragend', function (event) {
if (scope.mwDragend) {
$timeout(function () {
scope.mwDragend({event: event});
});
}
});
}
};
})
.directive('mwDroppable', function ($timeout) {
return {
restrict: 'A',
scope: {
mwDropData: '=',
mwDragenter: '&',
mwDragleave: '&',
mwDragover: '&',
mwDrop: '&',
disableDrop: '='
},
link: function (scope, el) {
el.addClass('droppable');
var getDragData = function (event) {
var text = event.originalEvent.dataTransfer.getData('text');
if (text) {
return JSON.parse(text);
}
};
if (scope.mwDragenter) {
el.on('dragenter', function (event) {
if (scope.disableDrop !== true) {
el.addClass('drag-over');
}
$timeout(function () {
scope.mwDragenter({event: event});
});
});
}
if (scope.mwDragleave) {
el.on('dragleave', function (event) {
el.removeClass('drag-over');
$timeout(function () {
scope.mwDragleave({event: event});
});
});
}
if (scope.mwDrop) {
el.on('drop', function (event) {
el.removeClass('drag-over');
if (event.stopPropagation) {
              event.stopPropagation(); // stops the browser from executing other event listeners that may be defined in parent elements.
}
var data = getDragData(event);
$timeout(function () {
scope.mwDrop({
event: event,
dragData: data,
dropData: scope.mwDropData
});
});
return false;
});
}
// Necessary. Allows us to drop.
var handleDragOver = function (ev) {
if (scope.disableDrop !== true) {
if (ev.preventDefault) {
ev.preventDefault();
}
return false;
}
};
el.on('dragover', handleDragOver);
if (scope.mwDragover) {
el.on('dragover', function (event) {
$timeout(function () {
scope.mwDragover({event: event});
});
});
}
}
};
})
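  /*
   * Illustrative usage sketch combining mwDraggable and mwDroppable (not part
   * of the original source; `item`, `targetList` and `onDrop` are assumptions
   * made for the example):
   *
   *   <div mw-draggable
   *        mw-drag-data="item"
   *        mw-drop-effect="move">Drag me</div>
   *
   *   <div mw-droppable
   *        mw-drop-data="targetList"
   *        mw-drop="onDrop(dragData, dropData)">Drop here</div>
   *
   * The dragged data is serialised into the dataTransfer object on dragstart
   * and handed to the mwDrop expression as `dragData`, while `dropData`
   * carries the droppable element's own data.
   */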
.directive('mwTextCollapse', function () {
return {
restrict: 'A',
scope: {
mwTextCollapse: '@',
length: '='
},
template: '<span>{{ mwTextCollapse | reduceStringTo:filterLength }}' +
' <a ng-if="showButton" ng-click="toggleLength()" style=\"cursor: pointer\">{{ ((_length !== filterLength) ? \'common.showLess\' : \'common.showMore\') | i18n}}</a></span>',
link: function (scope) {
var defaultLength = 200;
scope._length = scope.filterLength = (scope.length && typeof scope.length === 'number') ? scope.length : defaultLength;
scope.showButton = scope.mwTextCollapse.length > scope._length;
scope.toggleLength = function () {
scope.filterLength = (scope.filterLength !== scope._length) ? scope._length : undefined;
};
}
};
})
.directive('mwInfiniteScroll', function($window, $document) {
return {
restrict: 'A',
link: function(scope, el, attrs) {
var collection = scope.$eval(attrs.collection),
loading = false,
scrollFn,
scrollEl;
if(!collection || (collection && !collection.filterable)){
return;
}
var scrollCallback = function () {
if (!loading && scrollEl.scrollTop() >= ((d.height() - scrollEl.height()) - 100) && collection.filterable.hasNextPage()) {
loading = true;
collection.filterable.loadNextPage().then(function(){
loading = false;
});
}
};
var modalScrollCallback = function () {
if(!loading &&
collection.filterable.hasNextPage() &&
scrollEl[0].scrollHeight > 0 &&
(scrollEl[0].scrollHeight - scrollEl.scrollTop() - scrollEl[0].clientHeight < 2))
{
loading = true;
collection.filterable.loadNextPage().then(function(){
loading = false;
});
}
};
if(el.parents('.modal').length){
//element in modal
scrollEl = el.parents('.modal-body');
scrollFn = modalScrollCallback;
}
else {
//element in window
var d = angular.element($document);
scrollEl = angular.element($window);
scrollFn = scrollCallback;
}
// Register scroll callback
scrollEl.on('scroll', scrollFn);
// Deregister scroll callback if scope is destroyed
scope.$on('$destroy', function () {
scrollEl.off('scroll', scrollFn);
});
}
};
});
| mwComponents.js | 'use strict';
angular.module('mwComponents', [])
/**
* @ngdoc directive
* @name mwComponents.directive:mwPanel
* @element div
* @description
*
* Wrapper directive for {@link http://getbootstrap.com/components/#panels Bootstraps Panel}.
*
* @param {string} mwPanel Panel title
* @example
* <doc:example>
* <doc:source>
* <div mw-panel="Panel title">
* Panel content
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwPanel', function () {
return {
restrict: 'A',
replace: true,
require: '^?dashboardModule',
scope: {
title: '@mwPanel'
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwPanel.html',
link: function (scope, elm, attr, ctrl) {
if (ctrl) {
scope.isDashboardModule = true;
scope.showCloseButton = ctrl.numberOfModules > 1;
scope.closeModule = ctrl.closeModule;
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwSortIndicator
* @element span
* @description
*
* Displays a sort indicator. Arrow up when sort is active and not reversed arrow down vise versa.
*
* @param {boolean} isActive display an arrow up or down when true otherwise an up and down arrow
* @param {boolean} isReversed display an arrow up or down
* @example
* <doc:example>
* <doc:source>
* <div mw-sort-indicator is-active="true" is-reversed="false"></div>
* </doc:source>
* </doc:example>
*/
.directive('mwSortIndicator', function () {
return {
restrict: 'A',
replace: true,
scope: {
isActive: '=',
isReversed: '='
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwSortIndicator.html'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwAlert
* @element div
* @description
*
* Wrapper directive for {@link http://getbootstrap.com/components/#alerts Bootstraps Alert}.
*
* @param {string} mwAlert Alert type. Can be one of the following:
*
* - warning
* - danger
* - success
* - info
*
* @example
* <doc:example>
* <doc:source>
* <div mw-alert="warning">
* Alert content
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwAlert', function () {
return {
restrict: 'A',
replace: true,
scope: {
type: '@mwAlert'
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwAlert.html'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwHeader
* @element div
* @description
*
* Header bar for content pages.
*
* @param {string} title Header title
*
* @example
* <doc:example>
* <doc:source>
* <div mw-header title="A nice page">
* Header content, Buttons etc...
* </div>
* </doc:source>
* </doc:example>
*/
.directive('mwHeader', function ($location, $route, $rootScope) {
return {
transclude: true,
scope: {
title: '@',
url: '@',
showBackButton: '@',
warningText: '@',
warningCondition: '='
},
templateUrl: 'modules/ui/templates/mwComponents/mwHeader.html',
link: function (scope, el, attrs, ctrl, $transclude) {
$rootScope.siteTitleDetails = scope.title;
$transclude(function (clone) {
if ((!clone || clone.length === 0) && !scope.showBackButton) {
el.find('.navbar-header').addClass('no-buttons');
}
});
scope.refresh = function () {
$route.reload();
};
scope.back = function () {
if (attrs.url) {
$location.path(attrs.url);
} else {
window.history.back();
}
};
if (scope.warningText) {
el.find('.header-popover').popover({
trigger: 'hover',
placement: 'bottom',
container: el.find('.popover-container')
});
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwIcon
* @element span
* @description
*
* Wrapper for bootstrap glyphicons.
*
* @param {string} mwIcon Glyphicon class suffix. Example suffix for 'glyphicon glyphicon-search' is 'search'
* @param {string} tooltip Optional string which will be displayed as a tooltip when hovering over the icon
*
* @example
* <doc:example>
* <doc:source>
* <span mw-icon="search"></span>
* <span mw-icon="search" tooltip="This is a tooltip"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwIcon', function ($compile) {
return {
restrict: 'A',
replace: true,
scope: {
mwIcon: '@',
placement: '@'
},
template: function (elm, attr) {
var isBootstrap = angular.isArray(attr.mwIcon.match(/^fa-/));
if (isBootstrap) {
return '<i class="fa {{mwIcon}}"></i>';
} else {
return '<span class="glyphicon glyphicon-{{mwIcon}}"></span>';
}
},
link: function (scope, el, attr) {
if (attr.tooltip) {
el.popover({
trigger: 'hover',
placement: 'bottom',
content: attr.tooltip,
container: 'body'
});
attr.$observe('tooltip', function (newVal) {
el.popover('destroy');
el.popover({
trigger: 'hover',
placement: scope.placement || 'bottom',
content: newVal,
container: 'body'
});
});
}
if(!scope.mwIcon){
scope.$watch('mwIcon', function(newVal){
if(newVal){
var template,
isBootstrap = angular.isArray(scope.mwIcon.match(/^fa-/));
if (isBootstrap) {
template = '<i class="fa {{mwIcon}}"></i>';
} else {
template = '<span class="glyphicon glyphicon-{{mwIcon}}"></span>';
}
el.replaceWith($compile(template)(scope));
}
});
}
}
};
})
/**
* @ngdoc directive
* @name Relution.Common.directive:rlnTooltip
* @element span
*
* @description
* Creates a tooltip element using Bootstraps popover component.
*
* @param {String} mwTooltip Content of the tooltip
*
* @example
<span mw-tooltip="foobar"></span>
*/
.directive('mwTooltip', function () {
return {
restrict: 'A',
scope: {
text: '@mwTooltip',
placement: '@'
},
replace: true,
template: '<span class="mw-tooltip"><span mw-icon="question-sign" tooltip="{{ text }}" placement="{{ placement }}"></span></span>',
compile: function (elm, attr) {
if (attr.mwTooltipIcon) {
elm.find('span').attr('mw-icon', attr.mwTooltipIcon);
}
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwBadge
* @element span
* @description
*
* Wrapper for bootstrap labels.
*
* @param {string} mwBadge label class suffix. Example: suffix for 'label label-info' is 'search'
*
* @example
* <doc:example>
* <doc:source>
* <span mw-badge="info"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwBadge', function () {
return {
restrict: 'A',
replace: true,
scope: { mwBadge: '@' },
transclude: true,
template: '<span class="mw-badge label label-{{mwBadge}}" ng-transclude></span>'
};
})
.directive('mwEmptyState', function () {
return {
restrict: 'A',
replace: true,
scope: { mwBadge: '@' },
transclude: true,
template: '<div class="mw-empty-state"> <img src="images/logo-grey.png"><h2 ng-transclude class="lead"></h2> </div>'
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwFilterableSearch
* @element div
* @description
*
* Creates a search field to filter by in the sidebar. Search is triggered on keypress 'enter'.
*
* @param {filterable} filterable Filterable instance.
* @param {expression} disabled If expression evaluates to true, input is disabled.
* @param {string} property The name of the property on which the filtering should happen.
*/
.directive('mwFilterableSearch', function ($timeout, Loading, Detect) {
return {
transclude: true,
scope: {
filterable: '=',
mwDisabled: '=',
property: '@'
// loading: '='
},
templateUrl: 'modules/ui/templates/mwComponents/mwFilterableSearch.html',
link: function (scope) {
scope.model = scope.filterable.properties[scope.property];
scope.inputLength = 0;
scope.isMobile = Detect.isMobile();
var timeout;
var search = function () {
return scope.filterable.applyFilters();
};
var throttler = function () {
scope.searching = true;
$timeout.cancel(timeout);
timeout = $timeout(function () {
search().then(function () {
$timeout.cancel(timeout);
scope.searching = false;
}, function () {
scope.searching = false;
});
}, 500);
};
scope.search = function (event) {
if (!event || event.keyCode === 13) {
search();
} else {
if (!scope.isMobile) {
throttler();
}
}
};
scope.reset = function () {
scope.model.value = '';
search();
};
// Loading.registerDoneCallback(function(){
// scope.loading = false;
// });
//
// scope.loading = Loading.isLoading();
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwRating
* @element span
* @description
*
* Converts a rating number into stars
*
* @param {number | expression} mwRating rating score
   * @param {number} max the maximum number of stars
*
* @example
* <doc:example>
* <doc:source>
* <span mw-rating="3"></span>
* </doc:source>
* </doc:example>
*/
.directive('mwRating', function () {
return {
restrict: 'A',
scope: true,
template: '<i ng-repeat="star in stars" ng-class="star.state" class="fa"></i>',
link: function (scope, elm, attr) {
elm.addClass('mw-star-rating');
scope.stars = [];
var starsMax = scope.$eval(attr.max);
var buildStars = function (rating) {
scope.stars = [];
rating = (rating > starsMax) ? starsMax : rating;
rating = (rating < 0) ? 0 : rating;
for (var i = 0; i < Math.floor(rating); i++) {
scope.stars.push({state: 'fa-star'});
}
if (rating - Math.floor(rating) >= 0.5) {
scope.stars.push({state: 'fa-star-half-full'});
}
while (attr.max && scope.stars.length < starsMax) {
scope.stars.push({state: 'fa-star-o'});
}
};
attr.$observe('mwRating', function (value) {
buildStars(scope.$eval(value));
});
}
};
})
.directive('mwButtonHelp', function (i18n) {
return {
restrict: 'A',
scope: true,
link: function (scope, elm) {
var popup;
elm.addClass('mwButtonHelp');
var helpIcon = angular.element('<div>').addClass('help-icon glyphicon glyphicon-question-sign');
elm.prepend(helpIcon);
helpIcon.hover(function () {
buildPopup();
var targetOffset = angular.element(this).offset();
angular.element('body').append(popup);
popup.css('top', targetOffset.top - (popup.height() / 2) + 10 - angular.element(document).scrollTop());
popup.css('left', (targetOffset.left + 40));
}, function () {
angular.element('body > .mwButtonPopover').remove();
});
var buildPopup = function () {
popup = angular.element('<div>' + scope.helpText + '<ul></ul></div>').addClass('mwButtonPopover popover');
angular.forEach(scope.hintsToShow, function (hint) {
popup.find('ul').append('<li>' + hint.text + '</li>');
});
};
scope.$watch('hintsToShow', function (newVal) {
if (newVal.length) {
helpIcon.show();
} else {
helpIcon.hide();
}
});
scope.$on('$destroy', function () {
if (popup) {
popup.remove();
}
});
},
controller: function ($scope) {
$scope.registeredHints = [];
$scope.hintsToShow = [];
$scope.helpText = i18n.get('common.buttonHelp');
$scope.$on('i18n:localeChanged', function () {
$scope.helpText = i18n.get('common.buttonHelp');
});
var showHelp = function () {
$scope.hintsToShow = [];
angular.forEach($scope.registeredHints, function (registered) {
if (registered.condition) {
$scope.hintsToShow.push(registered);
}
});
};
//check if any condition changes
this.register = function (registered) {
$scope.$watch(function () {
return registered.condition;
}, showHelp);
$scope.registeredHints.push(registered);
};
}
};
})
.directive('mwButtonHelpCondition', function () {
return {
restrict: 'A',
require: '^mwButtonHelp',
scope: {
condition: '=mwButtonHelpCondition',
text: '@mwButtonHelpText'
},
link: function (scope, elm, attr, ctrl) {
ctrl.register(scope);
}
};
})
.directive('mwOptionGroup', function () {
return {
scope: {
title: '@',
description: '@',
mwDisabled: '='
},
transclude: true,
templateUrl: 'modules/ui/templates/mwComponents/mwOptionGroup.html',
link: function (scope, el) {
scope.randomId = _.uniqueId('option_group_');
el.find('input').attr('id', scope.randomId);
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwToggle
* @element span
* @description
*
* Displays a toggle button to toggle a boolean value
*
* @param {expression} mwModel model
* @param {function} mwChange the function which should be executed when the value has changed
*
*/
.directive('mwToggle', function ($timeout) {
return {
scope: {
mwModel: '=',
mwDisabled: '=',
mwChange: '&'
},
replace: true,
templateUrl: 'modules/ui/templates/mwComponents/mwToggle.html',
link: function (scope) {
scope.toggle = function (value) {
if (scope.mwModel !== value) {
scope.mwModel = !scope.mwModel;
$timeout(function () {
scope.mwChange({value: scope.mwModel});
});
}
};
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwTimeline
* @element div
* @description
*
   * Vertical timeline. This is the container element for timeline entries
*
*/
.directive('mwTimeline', function () {
return {
transclude: true,
replace: true,
template: '<div class="mw-timeline timeline clearfix"><hr class="vertical-line"><div class="content" ng-transclude></div></div>'
};
})
.directive('mwTimelineFieldset', function ($q) {
return {
scope: {
mwTitle: '@'
},
transclude: true,
replace: true,
templateUrl: 'modules/ui/templates/mwComponents/mwTimelineFieldset.html',
controller: function ($scope) {
$scope.entries = [];
this.register = function (entry) {
if (!_.findWhere($scope.entries, {$id: entry.$id})) {
$scope.entries.push(entry);
}
};
$scope.entriesVisible = true;
$scope.toggleEntries = function () {
var toggleEntryHideFns = [];
$scope.entries.forEach(function (entry) {
if ($scope.entriesVisible) {
toggleEntryHideFns.push(entry.hide());
} else {
toggleEntryHideFns.push(entry.show());
}
});
if (!$scope.entriesVisible) {
$scope.entriesVisible = !$scope.entriesVisible;
} else {
$q.all(toggleEntryHideFns).then(function () {
$scope.entriesVisible = !$scope.entriesVisible;
});
}
};
}
};
})
.directive('mwTimelineEntry', function ($q) {
return {
transclude: true,
replace: true,
template: '<li class="timeline-entry"><span class="bubble"></span><div ng-transclude></div></li>',
scope: true,
require: '^mwTimelineFieldset',
link: function (scope, el, attrs, mwTimelineFieldsetController) {
mwTimelineFieldsetController.register(scope);
scope.hide = function () {
var dfd = $q.defer();
el.fadeOut('slow', function () {
dfd.resolve();
});
return dfd.promise;
};
scope.show = function () {
var dfd = $q.defer();
el.fadeIn('slow', function () {
dfd.resolve();
});
return dfd.promise;
};
}
};
})
/**
* @ngdoc directive
* @name mwComponents.directive:mwDraggable
* @description
*
* Offers drag and drop functionality on any element. Data can be set with the mwDragData parameter.
* The drop callback of the mwDroppable element will receive this data.
*
*/
.directive('mwDraggable', function ($timeout) {
return {
restrict: 'A',
scope: {
mwDragData: '=',
//We can not use camelcase because foo-start is a reserved word from angular!
mwDragstart: '&',
mwDragend: '&',
mwDropEffect: '@'
},
link: function (scope, el) {
el.attr('draggable', true);
el.addClass('draggable', true);
if (scope.mwDragstart) {
el.on('dragstart', function (event) {
event.originalEvent.dataTransfer.setData('text', JSON.stringify(scope.mwDragData));
event.originalEvent.dataTransfer.effectAllowed = scope.mwDropEffect;
$timeout(function () {
scope.mwDragstart({event: event, dragData: scope.mwDragData});
});
});
}
el.on('dragend', function (event) {
if (scope.mwDragend) {
$timeout(function () {
scope.mwDragend({event: event});
});
}
});
}
};
})
.directive('mwDroppable', function ($timeout) {
return {
restrict: 'A',
scope: {
mwDropData: '=',
mwDragenter: '&',
mwDragleave: '&',
mwDragover: '&',
mwDrop: '&',
disableDrop: '='
},
link: function (scope, el) {
el.addClass('droppable');
var getDragData = function (event) {
var text = event.originalEvent.dataTransfer.getData('text');
if (text) {
return JSON.parse(text);
}
};
if (scope.mwDragenter) {
el.on('dragenter', function (event) {
if (scope.disableDrop !== true) {
el.addClass('drag-over');
}
$timeout(function () {
scope.mwDragenter({event: event});
});
});
}
if (scope.mwDragleave) {
el.on('dragleave', function (event) {
el.removeClass('drag-over');
$timeout(function () {
scope.mwDragleave({event: event});
});
});
}
if (scope.mwDrop) {
el.on('drop', function (event) {
el.removeClass('drag-over');
if (event.stopPropagation) {
              event.stopPropagation(); // stops the browser from executing other event listeners that may be defined in parent elements.
}
var data = getDragData(event);
$timeout(function () {
scope.mwDrop({
event: event,
dragData: data,
dropData: scope.mwDropData
});
});
return false;
});
}
// Necessary. Allows us to drop.
var handleDragOver = function (ev) {
if (scope.disableDrop !== true) {
if (ev.preventDefault) {
ev.preventDefault();
}
return false;
}
};
el.on('dragover', handleDragOver);
if (scope.mwDragover) {
el.on('dragover', function (event) {
$timeout(function () {
scope.mwDragover({event: event});
});
});
}
}
};
})
.directive('mwTextCollapse', function () {
return {
restrict: 'A',
scope: {
mwTextCollapse: '@',
length: '='
},
template: '<span>{{ mwTextCollapse | reduceStringTo:filterLength }}' +
' <a ng-if="showButton" ng-click="toggleLength()" style=\"cursor: pointer\">{{ ((_length !== filterLength) ? \'common.showLess\' : \'common.showMore\') | i18n}}</a></span>',
link: function (scope) {
var defaultLength = 200;
scope._length = scope.filterLength = (scope.length && typeof scope.length === 'number') ? scope.length : defaultLength;
scope.showButton = scope.mwTextCollapse.length > scope._length;
scope.toggleLength = function () {
scope.filterLength = (scope.filterLength !== scope._length) ? scope._length : undefined;
};
}
};
})
.directive('mwInfiniteScroll', function($window, $document) {
return {
restrict: 'A',
link: function(scope, el, attrs) {
var collection = scope.$eval(attrs.collection),
loading = false,
scrollFn,
scrollEl;
if(!collection || (collection && !collection.filterable)){
return;
}
var scrollCallback = function () {
if (!loading && scrollEl.scrollTop() >= ((d.height() - scrollEl.height()) - 100) && collection.filterable.hasNextPage()) {
loading = true;
collection.filterable.loadNextPage().then(function(){
loading = false;
});
}
};
var modalScrollCallback = function () {
if(!loading &&
collection.filterable.hasNextPage() &&
scrollEl[0].scrollHeight > 0 &&
(scrollEl[0].scrollHeight - scrollEl.scrollTop() - scrollEl[0].clientHeight < 2))
{
loading = true;
collection.filterable.loadNextPage().then(function(){
loading = false;
});
}
};
if(el.parents('.modal').length){
//element in modal
scrollEl = el.parents('.modal-body');
scrollFn = modalScrollCallback;
}
else {
//element in window
var d = angular.element($document);
scrollEl = angular.element($window);
scrollFn = scrollCallback;
}
// Register scroll callback
scrollEl.on('scroll', scrollFn);
// Deregister scroll callback if scope is destroyed
scope.$on('$destroy', function () {
scrollEl.off('scroll', scrollFn);
});
}
};
});
| changed comment
| mwComponents.js | changed comment | <ide><path>wComponents.js
<ide> restrict: 'A',
<ide> scope: {
<ide> mwDragData: '=',
<del> //We can not use camelcase because foo-start is a reserved word from angular!
<add> //We can not use camelcase because *-start is a reserved word from angular!
<ide> mwDragstart: '&',
<ide> mwDragend: '&',
<ide> mwDropEffect: '@' |
|
Java | mit | b77631535ef8d5d03a229c999b2245f0d4705f5e | 0 | Ziver/zutil,Ziver/zutil,Ziver/zutil | package zutil.net;
import zutil.osal.MultiCommandExecutor;
import zutil.osal.OSAbstractionLayer;
import java.io.*;
import java.net.Inet4Address;
import java.net.InetAddress;
/**
 * This class is an IPv4 scanner; it will scan a
* range of IPs to check if they are available.
* Note that this class uses the platform specific
* ping executable to check for availability.
*/
public class InetScanner {
private static final int TIMEOUT_MS = 50;
private InetScanListener listener;
private boolean canceled;
public void setListener(InetScanListener listener){
this.listener = listener;
}
/**
* Starts scanning a /24 ip range. This method will block until the scan is finished
*
* @param ip the network ip address
*/
public synchronized void scan(InetAddress ip){
canceled = false;
MultiCommandExecutor exec = new MultiCommandExecutor();
String netAddr = ip.getHostAddress().substring(0, ip.getHostAddress().lastIndexOf('.')+1);
try{
for (int i = 1; i < 255 && !canceled; i++) {
try {
String targetIp = netAddr+i;
exec.exec(platformPingCmd(targetIp));
boolean online = false;
for (String line; (line=exec.readLine()) != null;) {
if (platformPingCheck(line))
online = true;
}
if (online && listener != null)
listener.foundInetAddress(InetAddress.getByName(targetIp));
} catch (IOException e) {
e.printStackTrace();
}
}
}
catch (Exception e) {
e.printStackTrace();
} finally {
exec.close();
}
}
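    /*
     * Illustrative usage sketch (not part of the original file; the subnet
     * address and the listener body are assumptions made for the example):
     *
     *   InetScanner scanner = new InetScanner();
     *   scanner.setListener(new InetScanner.InetScanListener() {
     *       public void foundInetAddress(InetAddress ip) {
     *           System.out.println("Online: " + ip.getHostAddress());
     *       }
     *   });
     *   scanner.scan(InetAddress.getByName("192.168.1.0")); // blocks until the scan is finished
     */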
/**
* Cancels the ongoing ip scan
*/
public void cancel(){
canceled = true;
}
/**
* Will check if the given IP is reachable (Pingable)
*/
public static boolean isReachable(InetAddress ip){
String[] output = OSAbstractionLayer.exec(platformPingCmd(ip.getHostAddress()));
boolean online = false;
for (String line : output) {
if (platformPingCheck(line))
online = true;
}
return online;
}
private static String platformPingCmd(String ip){
switch (OSAbstractionLayer.getInstance().getOSType()){
case Windows:
return "ping -n 1 -w "+ TIMEOUT_MS +" " + ip;
case Linux:
case MacOS:
return "ping -c 1 -W "+ TIMEOUT_MS +" " + ip;
default:
return null;
}
}
private static boolean platformPingCheck(String line){
return line.contains("TTL=") || line.contains("ttl=");
}
public interface InetScanListener {
void foundInetAddress(InetAddress ip);
}
}
| src/zutil/net/InetScanner.java | package zutil.net;
import zutil.osal.MultiCommandExecutor;
import zutil.osal.OSAbstractionLayer;
import java.io.*;
/**
* Created by Ziver on 2016-09-11.
*/
public class InetScanner {
private static final int TIMEOUT_MS = 50;
public static void main(String[] args){
//scan();
scan2();
}
public static void scan(){
for (int i = 1; i < 255; i++) {
String ip = "192.168.1."+i;
System.out.println(ip+": "+isReachableByPing(ip));
}
}
public static boolean isReachableByPing(String host) {
try{
String[] output = OSAbstractionLayer.exec(getPlatformPingCmd(host));
if (output[2].contains("TTL=") || output[2].contains("ttl="))
return true;
} catch( Exception e ) {
e.printStackTrace();
}
return false;
}
public static void scan2(){
try{
MultiCommandExecutor exec = new MultiCommandExecutor();
// execute the desired command (here: ls) n times
for (int i = 1; i < 255; i++) {
try {
String ip = "192.168.1."+i;
exec.exec(getPlatformPingCmd(ip));
System.out.print(ip+": ");
boolean online = false;
for (String line; (line=exec.readLine()) != null;) {
if (line.contains("TTL=") || line.contains("ttl="))
online = true;
}
System.out.println(online);
}
catch (IOException e) {
System.out.println(e);
}
}
exec.close();
}
catch (Exception e) {
e.printStackTrace();
}
}
private static String getPlatformPingCmd(String ip){
switch (OSAbstractionLayer.getInstance().getOSType()){
case Windows:
return "ping -n 1 -w "+ TIMEOUT_MS +" " + ip;
case Linux:
case MacOS:
return "ping -c 1 -W "+ TIMEOUT_MS +" " + ip;
default:
return null;
}
}
}
| Implemented IP Scan api
| src/zutil/net/InetScanner.java | Implemented IP Scan api | <ide><path>rc/zutil/net/InetScanner.java
<ide> import zutil.osal.OSAbstractionLayer;
<ide>
<ide> import java.io.*;
<add>import java.net.Inet4Address;
<add>import java.net.InetAddress;
<ide>
<ide> /**
<del> * Created by Ziver on 2016-09-11.
<add> * This class is a IPv4 scanner, it will scan a
<add> * range of IPs to check if they are available.
<add> * Note that this class uses the platform specific
<add> * ping executable to check for availability.
<ide> */
<ide> public class InetScanner {
<ide> private static final int TIMEOUT_MS = 50;
<ide>
<add> private InetScanListener listener;
<add> private boolean canceled;
<ide>
<del> public static void main(String[] args){
<del> //scan();
<del> scan2();
<add>
<add> public void setListener(InetScanListener listener){
<add> this.listener = listener;
<ide> }
<ide>
<ide>
<del> public static void scan(){
<del> for (int i = 1; i < 255; i++) {
<del> String ip = "192.168.1."+i;
<del> System.out.println(ip+": "+isReachableByPing(ip));
<add> /**
<add> * Starts scanning a /24 ip range. This method will block until the scan is finished
<add> *
<add> * @param ip the network ip address
<add> */
<add> public synchronized void scan(InetAddress ip){
<add> canceled = false;
<add> MultiCommandExecutor exec = new MultiCommandExecutor();
<add> String netAddr = ip.getHostAddress().substring(0, ip.getHostAddress().lastIndexOf('.')+1);
<add>
<add> try{
<add> for (int i = 1; i < 255 && !canceled; i++) {
<add> try {
<add> String targetIp = netAddr+i;
<add> exec.exec(platformPingCmd(targetIp));
<add>
<add> boolean online = false;
<add> for (String line; (line=exec.readLine()) != null;) {
<add> if (platformPingCheck(line))
<add> online = true;
<add> }
<add> if (online && listener != null)
<add> listener.foundInetAddress(InetAddress.getByName(targetIp));
<add> } catch (IOException e) {
<add> e.printStackTrace();
<add> }
<add> }
<add> }
<add> catch (Exception e) {
<add> e.printStackTrace();
<add> } finally {
<add> exec.close();
<ide> }
<ide> }
<del> public static boolean isReachableByPing(String host) {
<del> try{
<del> String[] output = OSAbstractionLayer.exec(getPlatformPingCmd(host));
<del> if (output[2].contains("TTL=") || output[2].contains("ttl="))
<del> return true;
<ide>
<del> } catch( Exception e ) {
<del> e.printStackTrace();
<del> }
<del> return false;
<add> /**
<add> * Cancels the ongoing ip scan
<add> */
<add> public void cancel(){
<add> canceled = true;
<ide> }
<ide>
<ide>
<del> public static void scan2(){
<del> try{
<del> MultiCommandExecutor exec = new MultiCommandExecutor();
<del> // execute the desired command (here: ls) n times
<del> for (int i = 1; i < 255; i++) {
<del> try {
<del> String ip = "192.168.1."+i;
<del> exec.exec(getPlatformPingCmd(ip));
<add> /**
<add> * Will check if the given IP is reachable (Pingable)
<add> */
<add> public static boolean isReachable(InetAddress ip){
<add> String[] output = OSAbstractionLayer.exec(platformPingCmd(ip.getHostAddress()));
<ide>
<del> System.out.print(ip+": ");
<del> boolean online = false;
<del> for (String line; (line=exec.readLine()) != null;) {
<del> if (line.contains("TTL=") || line.contains("ttl="))
<del> online = true;
<del> }
<del> System.out.println(online);
<del> }
<del> catch (IOException e) {
<del> System.out.println(e);
<del> }
<del> }
<del>
<del> exec.close();
<add> boolean online = false;
<add> for (String line : output) {
<add> if (platformPingCheck(line))
<add> online = true;
<ide> }
<del> catch (Exception e) {
<del> e.printStackTrace();
<del> }
<add> return online;
<ide> }
<ide>
<del> private static String getPlatformPingCmd(String ip){
<add>
<add> private static String platformPingCmd(String ip){
<ide> switch (OSAbstractionLayer.getInstance().getOSType()){
<ide> case Windows:
<ide> return "ping -n 1 -w "+ TIMEOUT_MS +" " + ip;
<ide> return null;
<ide> }
<ide> }
<add> private static boolean platformPingCheck(String line){
<add> return line.contains("TTL=") || line.contains("ttl=");
<add> }
<add>
<add>
<add>
<add> public interface InetScanListener {
<add> void foundInetAddress(InetAddress ip);
<add> }
<ide> } |
|
Java | mit | 91ae6cf43e3245647d49d601ea11ea585639c9e9 | 0 | CruGlobal/android-gto-support,CruGlobal/android-gto-support,GlobalTechnology/android-gto-support | package org.ccci.gto.android.common.jsonapi;
import android.support.test.runner.AndroidJUnit4;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiAttribute;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiId;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiType;
import org.ccci.gto.android.common.jsonapi.model.JsonApiObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodeAbsent;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodePresent;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonPartEquals;
import static net.javacrumbs.jsonunit.core.Option.IGNORING_EXTRA_FIELDS;
import static net.javacrumbs.jsonunit.fluent.JsonFluentAssert.assertThatJson;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
@RunWith(AndroidJUnit4.class)
public class JsonApiConverterIT {
@Test(expected = IllegalArgumentException.class)
public void verifyConverterNoType() throws Exception {
new JsonApiConverter.Builder().addClasses(ModelNoType.class).build();
}
@Test(expected = IllegalArgumentException.class)
public void verifyConverterDuplicateTypes() throws Exception {
new JsonApiConverter.Builder().addClasses(ModelDuplicateType1.class, ModelDuplicateType2.class).build();
}
@Test
public void verifySupports() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelSimple.class, ModelAttributes.class).build();
assertThat(converter.supports(ModelSimple.class), is(true));
assertThat(converter.supports(ModelAttributes.class), is(true));
assertThat(converter.supports(Object.class), is(false));
}
@Test
public void verifyToJsonSimple() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelSimple.class).build();
final ModelSimple obj0 = new ModelSimple(99);
final String json = converter.toJson(JsonApiObject.single(obj0));
assertThatJson(json).node("data").isObject();
assertThat(json, jsonPartEquals("data.type", ModelSimple.TYPE));
assertThat(json, jsonPartEquals("data.id", obj0.mId));
final ModelSimple obj1 = new ModelSimple(42);
final String json2 = converter.toJson(JsonApiObject.of(obj0, obj1));
assertThatJson(json2).node("data").isArray();
assertThat(json2, jsonPartEquals("data[0].type", ModelSimple.TYPE));
assertThat(json2, jsonPartEquals("data[0].id", obj0.mId));
assertThat(json2, jsonPartEquals("data[1].type", ModelSimple.TYPE));
assertThat(json2, jsonPartEquals("data[1].id", obj1.mId));
}
@Test
public void verifyToJsonSingleResourceNull() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().build();
final String json = converter.toJson(JsonApiObject.single(null));
assertThat(json, jsonNodePresent("data"));
assertThatJson(json).node("data").isEqualTo(null);
}
@Test
public void verifyToJsonAttributes() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelAttributes.class).build();
final String json = converter.toJson(JsonApiObject.single(new ModelAttributes()));
assertThatJson(json).node("data").isObject();
assertThatJson(json).node("data.attributes").isObject();
assertThat(json, jsonPartEquals("data.type", ModelAttributes.TYPE));
assertThat(json, jsonPartEquals("data.attributes.attrStr1", "attrStr1"));
assertThat(json, jsonPartEquals("data.attributes.attrInt1", 1));
assertThat(json, jsonPartEquals("data.attributes.attrBool1", true));
assertThat(json, jsonPartEquals("data.attributes.attrAnn1", "attrAnn1"));
assertThat(json, jsonPartEquals("data.attributes.attrAnn2", "attrAnn2"));
assertThat(json, allOf(jsonNodeAbsent("data.attributes.transientAttr"),
jsonNodeAbsent("data.attributes.staticAttr")));
}
@Test
public void verifyToJsonRelationships() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelParent.class, ModelChild.class).build();
final ModelParent parent = new ModelParent();
parent.mId = 1;
parent.favorite = new ModelChild("Daniel");
parent.favorite.mId = 11;
parent.children.add(parent.favorite);
final ModelChild child2 = new ModelChild("Hey You");
child2.mId = 20;
parent.children.add(child2);
final String json = converter.toJson(JsonApiObject.single(parent));
assertThatJson(json).node("data").isObject();
assertThat(json, jsonPartEquals("data.type", ModelParent.TYPE));
assertThat(json, jsonNodeAbsent("data.attributes.favorite"));
assertThat(json, jsonNodeAbsent("data.attributes.children"));
assertThat(json, jsonPartEquals("data.relationships.favorite.type", ModelChild.TYPE));
assertThat(json, jsonPartEquals("data.relationships.favorite.id", parent.favorite.mId));
assertThat(json, jsonNodeAbsent("data.relationships.favorite.attributes"));
assertThatJson(json).node("data.relationships.children").isArray().ofLength(2);
assertThatJson(json).node("included").isArray().ofLength(2);
assertThatJson(json).node("included").matches(
hasItem(jsonEquals("{type:'child',id:11,attributes:{name:'Daniel'}}").when(IGNORING_EXTRA_FIELDS)));
assertThatJson(json).node("included").matches(
hasItem(jsonEquals("{type:'child',id:20,attributes:{name:'Hey You'}}").when(IGNORING_EXTRA_FIELDS)));
}
@Test
public void verifyFromJsonSimple() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelSimple.class).build();
final ModelSimple source = new ModelSimple(99);
final JsonApiObject<ModelSimple> output =
converter.fromJson(converter.toJson(JsonApiObject.single(source)), ModelSimple.class);
assertThat(output.isSingle(), is(true));
assertThat(output.getDataSingle(), is(not(nullValue())));
assertThat(output.getDataSingle().mId, is(99));
}
@Test
public void verifyFromJsonAttributes() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelAttributes.class).build();
final ModelAttributes source = new ModelAttributes();
source.mId = 19;
source.transientAttr = "tneisnart";
source.attrStr1 = "1rtSrtta";
source.attrInt1 = 2;
source.attrBool1 = false;
source.attrAnn1 = "1nnArtta";
source.ann2 = "2nnArtta";
final JsonApiObject<ModelAttributes> output =
converter.fromJson(converter.toJson(JsonApiObject.single(source)), ModelAttributes.class);
assertThat(output.isSingle(), is(true));
final ModelAttributes target = output.getDataSingle();
assertThat(target, is(not(nullValue())));
assertThat(target.mId, is(source.mId));
assertThat(target.transientAttr, is("transient"));
assertThat(target.finalAttr, is("final"));
assertThat(target.attrStr1, is(source.attrStr1));
assertThat(target.attrInt1, is(source.attrInt1));
assertThat(target.attrBool1, is(source.attrBool1));
assertThat(target.attrAnn1, is(source.attrAnn1));
assertThat(target.ann2, is(source.ann2));
}
@Test
public void verifyFromJsonRelationships() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelParent.class, ModelChild.class).build();
final ModelParent parent = new ModelParent();
parent.mId = 1;
parent.favorite = new ModelChild();
parent.favorite.mId = 11;
parent.children.add(parent.favorite);
final ModelChild child2 = new ModelChild();
child2.mId = 20;
parent.children.add(child2);
final JsonApiObject<ModelParent> output =
converter.fromJson(converter.toJson(JsonApiObject.single(parent)), ModelParent.class);
assertThat(output.isSingle(), is(true));
final ModelParent target = output.getDataSingle();
assertThat(target, is(not(nullValue())));
assertThat(target.mId, is(parent.mId));
assertThat(target.favorite, is(not(nullValue())));
assertThat(target.favorite.mId, is(parent.favorite.mId));
}
public static final class ModelNoType {}
@JsonApiType("type")
public static final class ModelDuplicateType1 {}
@JsonApiType("type")
public static final class ModelDuplicateType2 {}
public abstract static class ModelBase {
@JsonApiId
int mId;
}
@JsonApiType(ModelSimple.TYPE)
public static final class ModelSimple extends ModelBase {
static final String TYPE = "simple";
public ModelSimple() {}
public ModelSimple(final int id) {
mId = id;
}
}
@JsonApiType(ModelAttributes.TYPE)
public static final class ModelAttributes extends ModelBase {
static final String TYPE = "attrs";
transient String transientAttr = "transient";
static String staticAttr = "static";
final String finalAttr = "final";
private String attrStr1 = "attrStr1";
public int attrInt1 = 1;
boolean attrBool1 = true;
@JsonApiAttribute
String attrAnn1 = "attrAnn1";
@JsonApiAttribute(name = "attrAnn2")
String ann2 = "attrAnn2";
}
@JsonApiType(ModelParent.TYPE)
public static final class ModelParent extends ModelBase {
static final String TYPE = "parent";
List<ModelChild> children = new ArrayList<>();
// everyone has a favorite child
ModelChild favorite;
}
@JsonApiType(ModelChild.TYPE)
public static final class ModelChild extends ModelBase {
static final String TYPE = "child";
String name;
public ModelChild() {}
public ModelChild(final String name) {
this.name = name;
}
}
}
| gto-support-jsonapi/src/androidTest/java/org/ccci/gto/android/common/jsonapi/JsonApiConverterIT.java | package org.ccci.gto.android.common.jsonapi;
import android.support.test.runner.AndroidJUnit4;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiAttribute;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiId;
import org.ccci.gto.android.common.jsonapi.annotation.JsonApiType;
import org.ccci.gto.android.common.jsonapi.model.JsonApiObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.ArrayList;
import java.util.List;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodeAbsent;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodePresent;
import static net.javacrumbs.jsonunit.JsonMatchers.jsonPartEquals;
import static net.javacrumbs.jsonunit.fluent.JsonFluentAssert.assertThatJson;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
@RunWith(AndroidJUnit4.class)
public class JsonApiConverterIT {
@Test(expected = IllegalArgumentException.class)
public void verifyConverterNoType() throws Exception {
new JsonApiConverter.Builder().addClasses(ModelNoType.class).build();
}
@Test(expected = IllegalArgumentException.class)
public void verifyConverterDuplicateTypes() throws Exception {
new JsonApiConverter.Builder().addClasses(ModelDuplicateType1.class, ModelDuplicateType2.class).build();
}
@Test
public void verifySupports() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelSimple.class, ModelAttributes.class).build();
assertThat(converter.supports(ModelSimple.class), is(true));
assertThat(converter.supports(ModelAttributes.class), is(true));
assertThat(converter.supports(Object.class), is(false));
}
@Test
public void verifyToJsonSimple() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelSimple.class).build();
final ModelSimple obj0 = new ModelSimple(99);
final String json = converter.toJson(JsonApiObject.single(obj0));
assertThatJson(json).node("data").isObject();
assertThat(json, jsonPartEquals("data.type", ModelSimple.TYPE));
assertThat(json, jsonPartEquals("data.id", obj0.mId));
final ModelSimple obj1 = new ModelSimple(42);
final String json2 = converter.toJson(JsonApiObject.of(obj0, obj1));
assertThatJson(json2).node("data").isArray();
assertThat(json2, jsonPartEquals("data[0].type", ModelSimple.TYPE));
assertThat(json2, jsonPartEquals("data[0].id", obj0.mId));
assertThat(json2, jsonPartEquals("data[1].type", ModelSimple.TYPE));
assertThat(json2, jsonPartEquals("data[1].id", obj1.mId));
}
@Test
public void verifyToJsonSingleResourceNull() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().build();
final String json = converter.toJson(JsonApiObject.single(null));
assertThat(json, jsonNodePresent("data"));
assertThatJson(json).node("data").isEqualTo(null);
}
@Test
public void verifyToJsonAttributes() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelAttributes.class).build();
final String json = converter.toJson(JsonApiObject.single(new ModelAttributes()));
assertThatJson(json).node("data").isObject();
assertThatJson(json).node("data.attributes").isObject();
assertThat(json, jsonPartEquals("data.type", ModelAttributes.TYPE));
assertThat(json, jsonPartEquals("data.attributes.attrStr1", "attrStr1"));
assertThat(json, jsonPartEquals("data.attributes.attrInt1", 1));
assertThat(json, jsonPartEquals("data.attributes.attrBool1", true));
assertThat(json, jsonPartEquals("data.attributes.attrAnn1", "attrAnn1"));
assertThat(json, jsonPartEquals("data.attributes.attrAnn2", "attrAnn2"));
assertThat(json, allOf(jsonNodeAbsent("data.attributes.transientAttr"),
jsonNodeAbsent("data.attributes.staticAttr")));
}
@Test
public void verifyToJsonRelationships() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelParent.class, ModelChild.class).build();
final ModelParent parent = new ModelParent();
parent.mId = 1;
parent.favorite = new ModelChild();
parent.favorite.mId = 11;
parent.children.add(parent.favorite);
final ModelChild child2 = new ModelChild();
child2.mId = 20;
parent.children.add(child2);
final String json = converter.toJson(JsonApiObject.single(parent));
assertThatJson(json).node("data").isObject();
assertThat(json, jsonPartEquals("data.type", ModelParent.TYPE));
assertThat(json, jsonNodeAbsent("data.attributes.favorite"));
assertThat(json, jsonNodeAbsent("data.attributes.children"));
assertThat(json, jsonPartEquals("data.relationships.favorite.type", ModelChild.TYPE));
assertThat(json, jsonPartEquals("data.relationships.favorite.id", parent.favorite.mId));
assertThat(json, jsonNodeAbsent("data.relationships.favorite.attributes"));
assertThatJson(json).node("data.relationships.children").isArray().ofLength(2);
assertThatJson(json).node("included").isArray().ofLength(2);
}
@Test
public void verifyFromJsonSimple() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelSimple.class).build();
final ModelSimple source = new ModelSimple(99);
final JsonApiObject<ModelSimple> output =
converter.fromJson(converter.toJson(JsonApiObject.single(source)), ModelSimple.class);
assertThat(output.isSingle(), is(true));
assertThat(output.getDataSingle(), is(not(nullValue())));
assertThat(output.getDataSingle().mId, is(99));
}
@Test
public void verifyFromJsonAttributes() throws Exception {
final JsonApiConverter converter = new JsonApiConverter.Builder().addClasses(ModelAttributes.class).build();
final ModelAttributes source = new ModelAttributes();
source.mId = 19;
source.transientAttr = "tneisnart";
source.attrStr1 = "1rtSrtta";
source.attrInt1 = 2;
source.attrBool1 = false;
source.attrAnn1 = "1nnArtta";
source.ann2 = "2nnArtta";
final JsonApiObject<ModelAttributes> output =
converter.fromJson(converter.toJson(JsonApiObject.single(source)), ModelAttributes.class);
assertThat(output.isSingle(), is(true));
final ModelAttributes target = output.getDataSingle();
assertThat(target, is(not(nullValue())));
assertThat(target.mId, is(source.mId));
assertThat(target.transientAttr, is("transient"));
assertThat(target.finalAttr, is("final"));
assertThat(target.attrStr1, is(source.attrStr1));
assertThat(target.attrInt1, is(source.attrInt1));
assertThat(target.attrBool1, is(source.attrBool1));
assertThat(target.attrAnn1, is(source.attrAnn1));
assertThat(target.ann2, is(source.ann2));
}
@Test
public void verifyFromJsonRelationships() throws Exception {
final JsonApiConverter converter =
new JsonApiConverter.Builder().addClasses(ModelParent.class, ModelChild.class).build();
final ModelParent parent = new ModelParent();
parent.mId = 1;
parent.favorite = new ModelChild();
parent.favorite.mId = 11;
parent.children.add(parent.favorite);
final ModelChild child2 = new ModelChild();
child2.mId = 20;
parent.children.add(child2);
final JsonApiObject<ModelParent> output =
converter.fromJson(converter.toJson(JsonApiObject.single(parent)), ModelParent.class);
assertThat(output.isSingle(), is(true));
final ModelParent target = output.getDataSingle();
assertThat(target, is(not(nullValue())));
assertThat(target.mId, is(parent.mId));
assertThat(target.favorite, is(not(nullValue())));
assertThat(target.favorite.mId, is(parent.favorite.mId));
}
public static final class ModelNoType {}
@JsonApiType("type")
public static final class ModelDuplicateType1 {}
@JsonApiType("type")
public static final class ModelDuplicateType2 {}
public abstract static class ModelBase {
@JsonApiId
int mId;
}
@JsonApiType(ModelSimple.TYPE)
public static final class ModelSimple extends ModelBase {
static final String TYPE = "simple";
public ModelSimple() {}
public ModelSimple(final int id) {
mId = id;
}
}
@JsonApiType(ModelAttributes.TYPE)
public static final class ModelAttributes extends ModelBase {
static final String TYPE = "attrs";
transient String transientAttr = "transient";
static String staticAttr = "static";
final String finalAttr = "final";
private String attrStr1 = "attrStr1";
public int attrInt1 = 1;
boolean attrBool1 = true;
@JsonApiAttribute
String attrAnn1 = "attrAnn1";
@JsonApiAttribute(name = "attrAnn2")
String ann2 = "attrAnn2";
}
@JsonApiType(ModelParent.TYPE)
public static final class ModelParent extends ModelBase {
static final String TYPE = "parent";
List<ModelChild> children = new ArrayList<>();
// everyone has a favorite child
ModelChild favorite;
}
@JsonApiType(ModelChild.TYPE)
public static final class ModelChild extends ModelBase {
static final String TYPE = "child";
}
}
| add a couple tests for attributes on included related objects
| gto-support-jsonapi/src/androidTest/java/org/ccci/gto/android/common/jsonapi/JsonApiConverterIT.java | add a couple tests for attributes on included related objects | <ide><path>to-support-jsonapi/src/androidTest/java/org/ccci/gto/android/common/jsonapi/JsonApiConverterIT.java
<ide> import java.util.ArrayList;
<ide> import java.util.List;
<ide>
<add>import static net.javacrumbs.jsonunit.JsonMatchers.jsonEquals;
<ide> import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodeAbsent;
<ide> import static net.javacrumbs.jsonunit.JsonMatchers.jsonNodePresent;
<ide> import static net.javacrumbs.jsonunit.JsonMatchers.jsonPartEquals;
<add>import static net.javacrumbs.jsonunit.core.Option.IGNORING_EXTRA_FIELDS;
<ide> import static net.javacrumbs.jsonunit.fluent.JsonFluentAssert.assertThatJson;
<ide> import static org.hamcrest.CoreMatchers.allOf;
<add>import static org.hamcrest.CoreMatchers.hasItem;
<ide> import static org.hamcrest.CoreMatchers.is;
<ide> import static org.hamcrest.CoreMatchers.not;
<ide> import static org.hamcrest.CoreMatchers.nullValue;
<ide>
<ide> final ModelParent parent = new ModelParent();
<ide> parent.mId = 1;
<del> parent.favorite = new ModelChild();
<add> parent.favorite = new ModelChild("Daniel");
<ide> parent.favorite.mId = 11;
<ide> parent.children.add(parent.favorite);
<del> final ModelChild child2 = new ModelChild();
<add> final ModelChild child2 = new ModelChild("Hey You");
<ide> child2.mId = 20;
<ide> parent.children.add(child2);
<ide>
<ide> assertThat(json, jsonNodeAbsent("data.relationships.favorite.attributes"));
<ide> assertThatJson(json).node("data.relationships.children").isArray().ofLength(2);
<ide> assertThatJson(json).node("included").isArray().ofLength(2);
<add> assertThatJson(json).node("included").matches(
<add> hasItem(jsonEquals("{type:'child',id:11,attributes:{name:'Daniel'}}").when(IGNORING_EXTRA_FIELDS)));
<add> assertThatJson(json).node("included").matches(
<add> hasItem(jsonEquals("{type:'child',id:20,attributes:{name:'Hey You'}}").when(IGNORING_EXTRA_FIELDS)));
<ide> }
<ide>
<ide> @Test
<ide> @JsonApiType(ModelChild.TYPE)
<ide> public static final class ModelChild extends ModelBase {
<ide> static final String TYPE = "child";
<add>
<add> String name;
<add>
<add> public ModelChild() {}
<add>
<add> public ModelChild(final String name) {
<add> this.name = name;
<add> }
<ide> }
<ide> } |
|
Java | mit | 0f4b3829e1411f5aac617e1a72961d77e10e7659 | 0 | mohamedalami2/crossover,fadysamirzakarya/Podam3,amrinder7705/test6,victorcobuz/podam-master,fxbonnet/podam,victorcobuz/podam-master,rayiss/podam,jatac23/Test-01,victorash91/problem1,quettech/qa,fawazzac1987/https-github.com-mtedone-podam,peusebiu28/PRODSUP-001-solution,darshankaarki/podam,quettech/qa,KnHack/podam,sbhat5/sandy,mateenamc/Moving-BDD-to-Unit-tests,guru2208/podam_1,amansahni1/PRODSUP-001,yeseulei/develop-copy,tgafiuc/prodsup-001,ELRuncho/podam,fabriciobressan/crossover_1,deepikamittal11/test1,farhantahirt/podm,arunpaulonline/test,Elttbakh/Mido,andrei1986us/potdam,aditya13121988/Harry,Elttbakh/Test01,fabriciobressan/crossover_test,hapugem/cross_over_tech_trial,sricrossover/podam,dakinyade/podam,guru2208/podam_1,gargchap/PODAM,badewanto/podam,deepikamittal11/mtedone-podam,SCORPIO12/project2,daivanov/joinmo,ilkhamkz1/podam,victorash91/problem1,mohamedalami2/crossover,fabriciobressan/crossover,ProfTariq/testproject,SCORPIO12/project2,ajmalm83/test,fabriciobressan/crossover_question1,sruputway/podam_testing,KnHack/podam,fadysamirzakarya/podam-fork,Bhaskarsharma8/podam,Tshifhiwa84/Case-2,amansahni1/PRODSUP-001,dakinyade/podam,mbreslow/podam,fadysz/podam,daivanov/podam,arunpaulonline/test,peusebiu28/PRODSUP-001-solution,ELRuncho/podam,ChintanInterview/podamChintanInterview,ignitete/podam,thanhnbt/podam,sbhat5/sandy,lawrencegrey/podam-master,syedbilalmasaud/casename,muhamadsoliman/podam,franciscoGar/podam,deepikamittal11/test1,devopsfolks/podam,sufianqayyum131/PRODSUP-001,jowferraz/podam,fawazzac1987/https-github.com-mtedone-podam,kiran-borole/podam1,mohitj17/assignment1,shubhcollaborator/newpodam,ajaykumar86/podam,farhantahirt/podm,fadysamirzakarya/Podam3,fabriciobressan/crossover_1,rchennuri/podam,axlmor76/podam-1,rayiss/podam,chio003/Test,sricrossover/podam,HARIK5415/pod,aditya13121988/Harry,Elttbakh/Mido,muhammadallee/podam,SlothMasterRace/Test,daivanov/podam,jtardaguila/test,deepikamittal11/mtedone-podam,nova8888/PRODSUP-001,Elttbakh/Test01,rchennuri/podam,SilvesterGit/podam,zeeshan87/podam,mateenamc/Moving-BDD-to-Unit-tests,Bipasa/podam,kiran-borole/podam1,ovpopa/podam,lihenu/podam,Bipasa/podam,fadysamirzakarya/podam2,franciscoGar/podam,mohitj17/assignment1,Tshifhiwa84/Case-2,ProfTariq/testproject,devopsfolks/podam,jtardaguila/test,lihenu/podam,fabriciobressan/crossover_question1,mtedone/podam,victorash91/new,darshankaarki/podam,SilvesterGit/podam,andrei1986us/potdam,muhamadsoliman/podam,fxbonnet/podam,expertryk/podam_my,chio003/Test,ovpopa/podam,iamargo1979/podam,HARIK5415/pod,expertryk/podam_my,maboumejd/Case-PRODSUP-001---Harry,mmnofal/podam,muhammadallee/podam,ChintanInterview/podamChintanInterview,syedbilalmasaud/case1,daivanov/joinmo,fadysamirzakarya/podam2,Thaarini/testcase1,sruputway/podam_testing,ppusatkar2786/podam,zeeshan87/podam,sufianqayyum131/PRODSUP-001,Bhaskarsharma8/podam,gargchap/PODAM,fadysamirzakarya/podam-fork,badewanto/podam,tgafiuc/prodsup-001,hapugem/cross_over_tech_trial,ignitete/podam,victorash91/new,ocanto24/podam,flopez000001/podam,syedbilalmasaud/case1,Thaarini/testcase1,nova8888/PRODSUP-001,ocanto24/podam,viliescu/PRODSUP-001,fadysz/podam,jatac23/Test-01,fabriciobressan/crossover,amrinder7705/test6,ilkhamkz1/podam,mtedone/podam,iamargo1979/podam,viliescu/PRODSUP-001,lawrencegrey/podam-master,yeseulei/develop-copy,fabriciobressan/crossover_test,thanhnbt/podam,mmnofal/podam,SlothMasterRace/Test,axlmor76/podam-1,ppusatkar2786/podam,maboumejd/Case-PRODSUP-001---Harry,flopez000001/podam,jowferraz/
podam,mbreslow/podam,ajmalm83/test,ajaykumar86/podam,shubhcollaborator/newpodam,syedbilalmasaud/casename | src/test/java/uk/co/jemos/podam/test/unit/EnumTest.java | package uk.co.jemos.podam.test.unit;
import org.junit.Test;
import uk.co.jemos.podam.test.enums.ExternalRatePodamEnum;
/**
* Created by tedonema on 05/07/2015.
*/
public class EnumTest {
@Test
public void testEnum() {
System.out.println(ExternalRatePodamEnum.class.getName());
}
}
| Removed experiment test
| src/test/java/uk/co/jemos/podam/test/unit/EnumTest.java | Removed experiment test | <ide><path>rc/test/java/uk/co/jemos/podam/test/unit/EnumTest.java
<del>package uk.co.jemos.podam.test.unit;
<del>
<del>import org.junit.Test;
<del>import uk.co.jemos.podam.test.enums.ExternalRatePodamEnum;
<del>
<del>/**
<del> * Created by tedonema on 05/07/2015.
<del> */
<del>public class EnumTest {
<del>
<del> @Test
<del> public void testEnum() {
<del>
<del> System.out.println(ExternalRatePodamEnum.class.getName());
<del>
<del> }
<del>} |
||
Java | mit | 6412eba275c02edbc1ccda3e475364924d3f0770 | 0 | ekarayel/sync-mht,ekarayel/sync-mht | package com.github.ekarayel.syncmht.benchmarks;
import com.google.api.client.googleapis.compute.ComputeCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.InputStreamContent;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.compute.Compute;
import com.google.api.services.storage.Storage;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
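/**
 * Uploads the generated benchmark results file to the configured Cloud Storage
 * bucket and then deletes the Compute Engine instance this job ran on.
 */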
public class Stop {
private static String INSTANCE_ID_ENV = "INSTANCE_ID";
private static String TRAVIS_COMMIT_ENV = "TRAVIS_COMMIT";
public static void main(String[] args) throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
ComputeCredential credential =
new ComputeCredential.Builder(httpTransport, JacksonFactory.getDefaultInstance())
.build();
FileInputStream fis = new FileInputStream("benchmarks.json");
InputStreamContent mediaContent = new InputStreamContent("application/json", fis);
// Upload metrics
new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(), credential)
.setApplicationName(Constants.APP_NAME)
.build()
.objects()
.insert(Constants.BUCKET_NAME, null, mediaContent)
.setName("benchmark-"+Instant.now().toString())
.execute();
// Shutdown
new Compute.Builder(httpTransport, JacksonFactory.getDefaultInstance(), credential)
.setApplicationName(Constants.APP_NAME)
.build()
.instances()
.delete(Constants.PROJECT_ID, Constants.ZONE_NAME, System.getenv(INSTANCE_ID_ENV))
.execute();
}
}
| benchmarks/src/main/java/com/github/ekarayel/syncmht/benchmarks/Stop.java | package com.github.ekarayel.syncmht.benchmarks;
import com.google.api.client.googleapis.compute.ComputeCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.InputStreamContent;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.compute.Compute;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.StorageObject;
import com.sun.tools.internal.jxc.ap.Const;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.time.Instant;
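/**
 * Uploads the generated benchmark results file to the configured Cloud Storage
 * bucket and then deletes the Compute Engine instance this job ran on.
 */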
public class Stop {
private static String INSTANCE_ID_ENV = "INSTANCE_ID";
private static String TRAVIS_COMMIT_ENV = "TRAVIS_COMMIT";
public static void main(String[] args) throws GeneralSecurityException, IOException {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
ComputeCredential credential =
new ComputeCredential.Builder(httpTransport, JacksonFactory.getDefaultInstance())
.build();
FileInputStream fis = new FileInputStream("benchmarks.json");
InputStreamContent mediaContent = new InputStreamContent("application/json", fis);
// Upload metrics
new Storage.Builder(httpTransport, JacksonFactory.getDefaultInstance(), credential)
.setApplicationName(Constants.APP_NAME)
.build()
.objects()
.insert(Constants.BUCKET_NAME, null, mediaContent)
.setName("benchmark-"+Instant.now().toString())
.execute();
// Shutdown
new Compute.Builder(httpTransport, JacksonFactory.getDefaultInstance(), credential)
.setApplicationName(Constants.APP_NAME)
.build()
.instances()
.delete(Constants.PROJECT_ID, Constants.ZONE_NAME, System.getenv(INSTANCE_ID_ENV))
.execute();
}
}
| Fixed imports.
| benchmarks/src/main/java/com/github/ekarayel/syncmht/benchmarks/Stop.java | Fixed imports. | <ide><path>enchmarks/src/main/java/com/github/ekarayel/syncmht/benchmarks/Stop.java
<ide> import com.google.api.client.json.jackson2.JacksonFactory;
<ide> import com.google.api.services.compute.Compute;
<ide> import com.google.api.services.storage.Storage;
<del>import com.google.api.services.storage.model.StorageObject;
<del>import com.sun.tools.internal.jxc.ap.Const;
<ide>
<ide> import java.io.FileInputStream;
<ide> import java.io.IOException; |
|
JavaScript | apache-2.0 | 03a5469e45e7971e6103887d3d0da6ba61e35c63 | 0 | googleinterns/NeighborGood,googleinterns/NeighborGood,googleinterns/NeighborGood | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
const MAPSKEY = config.MAPS_KEY
let userLocation = null;
let currentCategory = "all";
let currentMiles = 5;
let currentPage = 1;
let taskPagesCache = null;
let currentPageNumberNode = null;
window.addEventListener("resize", displayPaginationUI);
//window.onscroll = stickyControlBar;
/* Scroll function so that the control bar sticks to the top of the page */
function stickyControlBar() {
let controlBarWrapper = document.getElementById("control-bar-message-wrapper");
let taskListDiv = document.getElementById("tasks-list");
  // Scrolling behavior on screens narrower than 1204px would result in overlapping DOM elements,
  // so this scrolling function only applies to screens at least that wide
if (window.innerWidth >= 1204) {
const OFFSET = 190; //Distance from top of page to top of control (categories) bar
if (window.pageYOffset >= OFFSET || document.body.scrollTop >= OFFSET || document.documentElement.scrollTop >= OFFSET) {
controlBarWrapper.style.position = "fixed";
// adjust task list container so it appears like it's in the same position
// after controlBarWrapper's position is changed to 'fixed'
taskListDiv.style.marginTop = "165px";
} else {
controlBarWrapper.style.position = "relative";
taskListDiv.style.marginTop = "auto";
}
}
}
/* Calls addUIClickHandlers and getTasksForUserLocation once page has loaded */
if (document.readyState === 'loading') {
// adds on load event listeners if document hasn't yet loaded
document.addEventListener('DOMContentLoaded', addUIClickHandlers);
document.addEventListener('DOMContentLoaded', getTasksForUserLocation);
} else {
// if DOMContentLoaded has already fired, it simply calls the functions
addUIClickHandlers();
getTasksForUserLocation();
}
/* Function adds all the necessary UI 'click' event listeners*/
function addUIClickHandlers() {
// adds showCreateTaskModal and closeCreateTaskModal click events for the add task button
if (document.body.contains(document.getElementById("addtaskbutton"))) {
document.getElementById("addtaskbutton").addEventListener("click", showCreateTaskModal);
document.getElementById("close-addtask-button").addEventListener("click", closeCreateTaskModal);
}
// adds filterTasksBy click event listener to category buttons
const categoryButtons = document.getElementsByClassName("categories");
for (let i = 0; i < categoryButtons.length; i++) {
categoryButtons[i].addEventListener("click", function(e) {
filterTasksBy(e.target.id);
});
}
// adds showTopScoresModal click event
document.getElementById("topscore-button").addEventListener("click", showTopScoresModal);
document.getElementById("close-topscore-button").addEventListener("click", closeTopScoresModal);
// adds distance radius change event
document.getElementById("distance-radius").addEventListener("change", function(e) {
fetchTasks(currentCategory, e.target.value)
.then(response => {
displayTasks(response);
displayPaginationUI();
});
});
// adds nextPage and prevPage click events
document.getElementById("prev-page").addEventListener("click", prevPage);
document.getElementById("next-page").addEventListener("click", nextPage);
}
/* Function loads previous page of tasks */
function prevPage() {
if (currentPage > 1) {
currentPage--;
displayTasks();
displayPaginationUI();
}
}
/* Function loads next page of tasks */
function nextPage() {
if (currentPage < taskPagesCache.pageCount) {
currentPage++;
displayTasks();
displayPaginationUI();
}
}
/* Function filters tasks by categories and styles selected categories */
function filterTasksBy(category) {
currentCategory = category;
// only fetches tasks if user's location has been retrieved
if (userLocationIsKnown()) {
fetchTasks(category, currentMiles)
.then(response => {
displayTasks(response);
displayPaginationUI();
});
}
// Unhighlights and resets styling for all category buttons
const categoryButtons = document.getElementsByClassName("categories");
for (let i = 0; i < categoryButtons.length; i++){
let button = categoryButtons[i];
if (document.getElementById(category) != button) {
button.style.backgroundColor = "rgb(76, 175, 80)";
button.addEventListener("mouseover", function() {
button.style.backgroundColor = "rgb(62, 142, 65)";
});
button.addEventListener("mouseout", function() {
button.style.backgroundColor = "rgb(76, 175, 80)"
});
} else {
button.style.backgroundColor = "rgb(62, 142, 65)";
button.addEventListener("mouseover", function() {
button.style.backgroundColor = "rgb(62, 142, 65)";
});
button.addEventListener("mouseout", function() {
button.style.backgroundColor = "rgb(62, 142, 65)"
});
}
}
}
/* Function that display the help out overlay */
function helpOut(element) {
const task = element.closest(".task");
const overlay = task.getElementsByClassName("help-overlay");
overlay[0].style.display = "block";
}
/* Function sends a fetch request to the edit task servlet when the user
offers to help out, edits the task's status and helper properties, and
then reloads the task list */
function confirmHelp(element) {
const task = element.closest(".task");
const url = "/tasks/edit?task-id=" + task.dataset.key + "&action=helpout";
const request = new Request(url, {method: "POST"});
fetch(request).then((response) => {
// checks if another user has already claimed the task
if (response.status == 409) {
window.alert
("We're sorry, but the task you're trying to help with has already been claimed by another user.");
window.location.href = '/';
}
// fetches tasks again if user's current location was successfully retrieved and stored
else if (userLocationIsKnown()) {
fetchTasks(currentCategory, currentMiles).then(response => {
displayTasks(response);
displayPaginationUI();
});
}
});
}
/* Function that hides the help out overlay */
function exitHelp(element) {
element.closest(".help-overlay").style.display = "none";
}
/* Leonard's implementation of the Add Task modal */
function showCreateTaskModal() {
var modal = document.getElementById("createTaskModalWrapper");
modal.style.display = "block";
}
function closeCreateTaskModal() {
var modal = document.getElementById("createTaskModalWrapper");
modal.style.display = "none";
}
function validateTaskForm(id) {
var result = true;
var form = document.getElementById(id);
var inputName = ["task-overview", "task-detail", "reward", "category"];
for (var i = 0; i < inputName.length; i++) {
var name = inputName[i];
var inputField = form[name.concat("-input")].value.trim();
if (inputField === "") {
result = false;
form[name.concat("-input")].classList.add("highlight");
} else {
form[name.concat("-input")].classList.remove("highlight");
}
}
if (!result) {
alert("All fields are required. Please fill out all fields with non-empty input.");
return false;
}
return true;
}
/* Function that calls the loadTopScorers functions
and then shows the top scores modal */
function showTopScoresModal() {
loadTopScorers("world");
if (userLocationIsKnown()){
loadTopScorers("nearby");
}
document.getElementById("topScoresModalWrapper").style.display = "block";
}
/* Function closes the top scores modal */
function closeTopScoresModal() {
document.getElementById("topScoresModalWrapper").style.display = "none";
}
/* Function loads the data for the top scorers table */
function loadTopScorers(location) {
let url = "/account?action=topscorers";
if (location === "nearby") {
url += "&lat=" + userLocation.lat + "&lng=" + userLocation.lng;
}
fetch(url)
.then(response => response.json())
.then(users => {
// Inserts Nickname and Points for every top scorer
for (let i = 0; i < users.length; i++) {
let points = users[i].points;
let nickname = users[i].nickname;
let rowId = location + (i + 1);
let row = document.getElementById(rowId);
let rowNickname = row.getElementsByClassName("topscore-nickname")[0];
let rowScore = row.getElementsByClassName("topscore-score")[0];
rowNickname.innerText = nickname;
rowScore.innerText = points;
// Adds different styling if row includes current user
if (users[i].isCurrentUser) {
row.style.fontWeight = "bold";
row.setAttribute("title", "Congratulations, you made it to the Top Scorers Board!");
}
}
});
}
// If the user clicks outside of the modals, close them directly
window.onclick = function(event) {
var createTaskModal = document.getElementById("createTaskModalWrapper");
if (event.target == createTaskModal) {
createTaskModal.style.display = "none";
}
var topScoresModal = document.getElementById("topScoresModalWrapper");
if (event.target == topScoresModal) {
topScoresModal.style.display = "none";
}
var infoModal = document.getElementById("taskInfoModalWrapper");
if (event.target == infoModal) {
infoModal.style.display = "none";
}
}
/* Leonard's implementation of showing task details in a pop up window */
async function getTaskInfo(keyString) {
const queryURL = "/tasks/info?key=" + keyString;
const request = new Request(queryURL, {method: "GET"});
const response = await fetch(request);
const info = await response.json();
return info;
}
async function showTaskInfo(keyString) {
const info = await getTaskInfo(keyString);
var detailContainer = document.getElementById("task-detail-container");
detailContainer.innerHTML = "";
detailContainer.appendChild(document.createTextNode(info.detail));
var modal = document.getElementById("taskInfoModalWrapper");
modal.style.display = "block";
}
function closeTaskInfoModal() {
var modal = document.getElementById("taskInfoModalWrapper");
modal.style.display = "none";
}
/* Function dynamically adds Maps API and
 begins the process of retrieving the user's location */
function getTasksForUserLocation() {
const script = document.createElement("script");
script.type = "text/javascript";
script.src = "https://maps.googleapis.com/maps/api/js?key=" + MAPSKEY + "&callback=initialize&language=en";
script.defer = true;
script.async = true;
document.head.appendChild(script);
// Once the Maps API script has dynamically loaded it gets the user location,
  // waits until it gets an answer, updates the global userLocation variable, and then calls
// fetchTasks and displayTasks
window.initialize = function () {
getUserLocation().then(() => fetchTasks(currentCategory, currentMiles))
.then(response => {
displayTasks(response);
displayPaginationUI();
})
.catch(() => {
console.error("User location could not be retrieved");
document.getElementById("location-missing-message").style.display = "block";
});
}
}
/* Function that returns a promise to get and return the user's location */
function getUserLocation() {
return new Promise((resolve, reject) => {
if (navigator.geolocation) {
navigator.geolocation.getCurrentPosition(function(position) {
userLocation = {lat: position.coords.latitude, lng: position.coords.longitude};
resolve(userLocation);
}, function() {
if (locationByIPSuccesful()) resolve(userLocation);
else reject("User location failed");
});
} else {
if (locationByIPSuccesful()) resolve(userLocation);
else reject("User location failed");
}
});
}
/* Function used as a fallback to retrieve the user's location by IP address */
function locationByIPSuccesful() {
let url = "https://www.googleapis.com/geolocation/v1/geolocate?key=" + MAPSKEY;
const request = new Request(url, {method: "POST"});
fetch(request).then(response => {
if (response.status == 400 || response.status == 403 || response.status == 404) {
return false;
} else {
response.json().then(jsonresponse => {
userLocation = jsonresponse["location"];
return true;
});
}
});
}
/* Fetches tasks from servlet by location and category */
function fetchTasks(category, miles) {
let url = "/tasks?lat=" + userLocation.lat + "&lng=" + userLocation.lng + "&miles=" + miles;
if (category !== undefined && category != "all") {
url += "&category=" + category;
}
return fetch(url).then(response => response.json());
}
/* Displays the tasks received from the server response */
function displayTasks(response) {
  // If a response is passed, the taskPagesCache is updated along with the next and prev page buttons
if (response !== undefined) {
taskPagesCache = response;
    // If displayTasks is called and the result has fewer pages than the page the user was last on, the currentPage will get reset to 1
if (currentPage > taskPagesCache.pageCount) {
currentPage = 1;
}
}
if (taskPagesCache !== null && taskPagesCache.taskCount > 0) {
document.getElementById("no-tasks-message").style.display = "none";
document.getElementById("tasks-message").style.display = "block";
document.getElementById("tasks-list").innerHTML = taskPagesCache.taskPages[currentPage - 1];
document.getElementById("tasks-list").style.display = "block";
addTasksClickHandlers();
} else {
document.getElementById("no-tasks-message").style.display = "block";
document.getElementById("tasks-message").style.display = "none";
document.getElementById("tasks-list").style.display = "none";
}
}
/** Function loads and displays all the pagination UI components (page numbers and buttons) */
function displayPaginationUI() {
updatePageButtonStyling();
let pageNumbersWrapper = document.getElementById("page-numbers-wrapper");
pageNumbersWrapper.innerHTML = "";
// Page numbers displayed for smaller screens with several pages should
// show only the first, current, and last page
if (taskPagesCache.pageCount >= 5 && (window.innerWidth < 375)) {
let pageNumberSpacing = document.createElement("div");
pageNumberSpacing.classList.add("page-number-spacing");
pageNumberSpacing.innerText = "...";
// Only displays first page if the current page isn't already the first page
if (currentPage !== 1) {
let firstPageNumber = createPageNumberElement(1);
pageNumbersWrapper.appendChild(firstPageNumber);
pageNumbersWrapper.appendChild(pageNumberSpacing);
}
let currentPageNumber = createPageNumberElement(currentPage);
pageNumbersWrapper.appendChild(currentPageNumber);
// Only displays last page if the current page isn't already the last page
if (currentPage != taskPagesCache.pageCount) {
pageNumbersWrapper.appendChild(pageNumberSpacing.cloneNode(true));
let lastPageNumber = createPageNumberElement(taskPagesCache.pageCount);
pageNumbersWrapper.appendChild(lastPageNumber);
}
// Displays all page numbers if less than 5 pages or if the screen is big enough
} else {
for (let i = 1; i <= taskPagesCache.pageCount; i++) {
let pageNumber = createPageNumberElement(i);
pageNumbersWrapper.appendChild(pageNumber);
}
}
currentPageNumberNode = document.getElementById("current-page");
}
/** Helper function that creates a page number element when provided with the page number */
function createPageNumberElement(number) {
let pageNumber = document.createElement("a");
pageNumber.classList.add("page-number");
pageNumber.innerText = number;
if (currentPage === number) pageNumber.setAttribute("id", "current-page");
pageNumber.addEventListener("click", function() {
currentPageNumberNode.removeAttribute("id");
currentPageNumberNode = pageNumber;
currentPage = number;
pageNumber.setAttribute("id", "current-page");
displayPaginationUI();
displayTasks();
});
return pageNumber;
}
/** Function Updates the attributes and styles of the next and prev page buttons upon page changes */
function updatePageButtonStyling() {
let nextPageButton = document.getElementById("next-page");
let prevPageButton = document.getElementById("prev-page");
if (currentPage === taskPagesCache.pageCount) {
nextPageButton.style.cursor = "not-allowed";
nextPageButton.setAttribute("title", "You are already on the last page");
} else {
nextPageButton.style.cursor = "pointer";
nextPageButton.removeAttribute("title");
}
if (currentPage === 1) {
prevPageButton.style.cursor = "not-allowed";
prevPageButton.setAttribute("title", "You are already on the first page");
} else {
prevPageButton.style.cursor = "pointer";
prevPageButton.removeAttribute("title");
}
}
/* Function adds all the necessary tasks 'click' event listeners*/
function addTasksClickHandlers() {
// adds confirmHelp click event listener to confirm help buttons
const confirmHelpButtons = document.getElementsByClassName("confirm-help");
for (let i = 0; i < confirmHelpButtons.length; i++){
confirmHelpButtons[i].addEventListener("click", function(e) {
confirmHelp(e.target);
});
}
// adds exitHelp click event listener to exit help buttons
const exitHelpButtons = document.getElementsByClassName("exit-help");
for (let i = 0; i < exitHelpButtons.length; i++) {
exitHelpButtons[i].addEventListener("click", function(e) {
exitHelp(e.target);
});
}
// adds helpOut click event listener to help out buttons
const helpOutButtons = document.getElementsByClassName("help-out");
for (let i = 0; i < helpOutButtons.length; i++) {
if (!helpOutButtons[i].classList.contains("disable-help")) {
helpOutButtons[i].addEventListener("click", function(e) {
helpOut(e.target);
});
}
}
}
/* Helper function that determines if the current user's location is known */
function userLocationIsKnown() {
return (userLocation !== null);
}
| src/main/webapp/homepage_script.js | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
const MAPSKEY = config.MAPS_KEY
let userLocation = null;
let currentCategory = "all";
let currentMiles = 5;
let currentPage = 1;
let taskPagesCache = null;
let currentPageNumberNode = null;
//window.onscroll = stickyControlBar;
/* Scroll function so that the control bar sticks to the top of the page */
function stickyControlBar() {
let controlBarWrapper = document.getElementById("control-bar-message-wrapper");
let taskListDiv = document.getElementById("tasks-list");
  // Scrolling behavior on screens narrower than 1204px would result in overlapping DOM elements,
  // so this scrolling function only applies to screens at least that wide
if (window.innerWidth >= 1204) {
const OFFSET = 190; //Distance from top of page to top of control (categories) bar
if (window.pageYOffset >= OFFSET || document.body.scrollTop >= OFFSET || document.documentElement.scrollTop >= OFFSET) {
controlBarWrapper.style.position = "fixed";
// adjust task list container so it appears like it's in the same position
// after controlBarWrapper's position is changed to 'fixed'
taskListDiv.style.marginTop = "165px";
} else {
controlBarWrapper.style.position = "relative";
taskListDiv.style.marginTop = "auto";
}
}
}
/* Calls addUIClickHandlers and getTasksForUserLocation once page has loaded */
if (document.readyState === 'loading') {
// adds on load event listeners if document hasn't yet loaded
document.addEventListener('DOMContentLoaded', addUIClickHandlers);
document.addEventListener('DOMContentLoaded', getTasksForUserLocation);
} else {
// if DOMContentLoaded has already fired, it simply calls the functions
addUIClickHandlers();
getTasksForUserLocation();
}
/* Function adds all the necessary UI 'click' event listeners*/
function addUIClickHandlers() {
// adds showCreateTaskModal and closeCreateTaskModal click events for the add task button
if (document.body.contains(document.getElementById("addtaskbutton"))) {
document.getElementById("addtaskbutton").addEventListener("click", showCreateTaskModal);
document.getElementById("close-addtask-button").addEventListener("click", closeCreateTaskModal);
}
// adds filterTasksBy click event listener to category buttons
const categoryButtons = document.getElementsByClassName("categories");
for (let i = 0; i < categoryButtons.length; i++) {
categoryButtons[i].addEventListener("click", function(e) {
filterTasksBy(e.target.id);
});
}
// adds showTopScoresModal click event
document.getElementById("topscore-button").addEventListener("click", showTopScoresModal);
document.getElementById("close-topscore-button").addEventListener("click", closeTopScoresModal);
// adds distance radius change event
document.getElementById("distance-radius").addEventListener("change", function(e) {
fetchTasks(currentCategory, e.target.value)
.then(response => displayTasks(response));
});
// adds nextPage and prevPage click events
document.getElementById("prev-page").addEventListener("click", prevPage);
document.getElementById("next-page").addEventListener("click", nextPage);
}
/* Function loads previous page of tasks */
function prevPage() {
if (currentPage > 1) {
currentPage--;
displayTasks();
if (currentPage === 1) {
let prevPageButton = document.getElementById("prev-page");
prevPageButton.style.cursor = "not-allowed";
prevPageButton.setAttribute("title", "You are already on the first page");
}
if (currentPage < taskPagesCache.pageCount) {
let nextPageButton = document.getElementById("next-page");
nextPageButton.style.cursor = "pointer";
nextPageButton.removeAttribute("title");
}
displayPaginationUI();
}
}
/* Function loads next page of tasks */
function nextPage() {
if (currentPage < taskPagesCache.pageCount) {
currentPage++;
displayTasks();
document.getElementById("prev-page").style.cursor = "pointer";
document.getElementById("prev-page").removeAttribute("title");
if (currentPage === taskPagesCache.pageCount) {
let nextPageButton = document.getElementById("next-page");
nextPageButton.style.cursor = "not-allowed";
nextPageButton.setAttribute("title", "You are already on the last page");
}
displayPaginationUI();
}
}
/* Function filters tasks by categories and styles selected categories */
function filterTasksBy(category) {
currentCategory = category;
// only fetches tasks if user's location has been retrieved
if (userLocationIsKnown()) {
fetchTasks(category, currentMiles)
.then(response => displayTasks(response));
}
// Unhighlights and resets styling for all category buttons
const categoryButtons = document.getElementsByClassName("categories");
for (let i = 0; i < categoryButtons.length; i++){
let button = categoryButtons[i];
if (document.getElementById(category) != button) {
button.style.backgroundColor = "rgb(76, 175, 80)";
button.addEventListener("mouseover", function() {
button.style.backgroundColor = "rgb(62, 142, 65)";
});
button.addEventListener("mouseout", function() {
button.style.backgroundColor = "rgb(76, 175, 80)"
});
} else {
button.style.backgroundColor = "rgb(62, 142, 65)";
button.addEventListener("mouseover", function() {
button.style.backgroundColor = "rgb(62, 142, 65)";
});
button.addEventListener("mouseout", function() {
button.style.backgroundColor = "rgb(62, 142, 65)"
});
}
}
}
/* Function that display the help out overlay */
function helpOut(element) {
const task = element.closest(".task");
const overlay = task.getElementsByClassName("help-overlay");
overlay[0].style.display = "block";
}
/* Function sends a fetch request to the edit task servlet when the user
offers to help out, edits the task's status and helper properties, and
then reloads the task list */
function confirmHelp(element) {
const task = element.closest(".task");
const url = "/tasks/edit?task-id=" + task.dataset.key + "&action=helpout";
const request = new Request(url, {method: "POST"});
fetch(request).then((response) => {
// checks if another user has already claimed the task
if (response.status == 409) {
window.alert
("We're sorry, but the task you're trying to help with has already been claimed by another user.");
window.location.href = '/';
}
// fetches tasks again if user's current location was successfully retrieved and stored
else if (userLocationIsKnown()) {
fetchTasks(currentCategory, currentMiles).then(response => displayTasks(response));
}
});
}
/* Function that hides the help out overlay */
function exitHelp(element) {
element.closest(".help-overlay").style.display = "none";
}
/* Leonard's implementation of the Add Task modal */
function showCreateTaskModal() {
var modal = document.getElementById("createTaskModalWrapper");
modal.style.display = "block";
}
function closeCreateTaskModal() {
var modal = document.getElementById("createTaskModalWrapper");
modal.style.display = "none";
}
function validateTaskForm(id) {
var result = true;
var form = document.getElementById(id);
var inputName = ["task-overview", "task-detail", "reward", "category"];
for (var i = 0; i < inputName.length; i++) {
var name = inputName[i];
var inputField = form[name.concat("-input")].value.trim();
if (inputField === "") {
result = false;
form[name.concat("-input")].classList.add("highlight");
} else {
form[name.concat("-input")].classList.remove("highlight");
}
}
if (!result) {
alert("All fields are required. Please fill out all fields with non-empty input.");
return false;
}
return true;
}
/* Function that calls the loadTopScorers functions
and then shows the top scores modal */
function showTopScoresModal() {
loadTopScorers("world");
if (userLocationIsKnown()){
loadTopScorers("nearby");
}
document.getElementById("topScoresModalWrapper").style.display = "block";
}
/* Function closes the top scores modal */
function closeTopScoresModal() {
document.getElementById("topScoresModalWrapper").style.display = "none";
}
/* Function loads the data for the top scorers table */
function loadTopScorers(location) {
let url = "/account?action=topscorers";
if (location === "nearby") {
url += "&lat=" + userLocation.lat + "&lng=" + userLocation.lng;
}
fetch(url)
.then(response => response.json())
.then(users => {
// Inserts Nickname and Points for every top scorer
for (let i = 0; i < users.length; i++) {
let points = users[i].points;
let nickname = users[i].nickname;
let rowId = location + (i + 1);
let row = document.getElementById(rowId);
let rowNickname = row.getElementsByClassName("topscore-nickname")[0];
let rowScore = row.getElementsByClassName("topscore-score")[0];
rowNickname.innerText = nickname;
rowScore.innerText = points;
// Adds different styling if row includes current user
if (users[i].isCurrentUser) {
row.style.fontWeight = "bold";
row.setAttribute("title", "Congratulations, you made it to the Top Scorers Board!");
}
}
});
}
// If the user clicks outside of the modals, close them directly
window.onclick = function(event) {
var createTaskModal = document.getElementById("createTaskModalWrapper");
if (event.target == createTaskModal) {
createTaskModal.style.display = "none";
}
var topScoresModal = document.getElementById("topScoresModalWrapper");
if (event.target == topScoresModal) {
topScoresModal.style.display = "none";
}
var infoModal = document.getElementById("taskInfoModalWrapper");
if (event.target == infoModal) {
infoModal.style.display = "none";
}
}
/* Leonard's implementation of showing task details in a pop up window */
async function getTaskInfo(keyString) {
const queryURL = "/tasks/info?key=" + keyString;
const request = new Request(queryURL, {method: "GET"});
const response = await fetch(request);
const info = await response.json();
return info;
}
async function showTaskInfo(keyString) {
const info = await getTaskInfo(keyString);
var detailContainer = document.getElementById("task-detail-container");
detailContainer.innerHTML = "";
detailContainer.appendChild(document.createTextNode(info.detail));
var modal = document.getElementById("taskInfoModalWrapper");
modal.style.display = "block";
}
function closeTaskInfoModal() {
var modal = document.getElementById("taskInfoModalWrapper");
modal.style.display = "none";
}
/* Function dynamically adds Maps API and
 begins the process of retrieving the user's location */
function getTasksForUserLocation() {
const script = document.createElement("script");
script.type = "text/javascript";
script.src = "https://maps.googleapis.com/maps/api/js?key=" + MAPSKEY + "&callback=initialize&language=en";
script.defer = true;
script.async = true;
document.head.appendChild(script);
// Once the Maps API script has dynamically loaded it gets the user location,
  // waits until it gets an answer, updates the global userLocation variable, and then calls
// fetchTasks and displayTasks
window.initialize = function () {
getUserLocation().then(() => fetchTasks(currentCategory, currentMiles))
.then(jsonresponse => displayTasks(jsonresponse))
.catch(() => {
console.error("User location could not be retrieved");
document.getElementById("location-missing-message").style.display = "block";
});
}
}
/* Function that returns a promise to get and return the user's location */
function getUserLocation() {
return new Promise((resolve, reject) => {
if (navigator.geolocation) {
navigator.geolocation.getCurrentPosition(function(position) {
userLocation = {lat: position.coords.latitude, lng: position.coords.longitude};
resolve(userLocation);
}, function() {
if (locationByIPSuccesful()) resolve(userLocation);
else reject("User location failed");
});
} else {
if (locationByIPSuccesful()) resolve(userLocation);
else reject("User location failed");
}
});
}
function locationByIPSuccesful() {
let url = "https://www.googleapis.com/geolocation/v1/geolocate?key=" + MAPSKEY;
const request = new Request(url, {method: "POST"});
fetch(request).then(response => {
if (response.status == 400 || response.status == 403 || response.status == 404) {
return false;
} else {
response.json().then(jsonresponse => {
userLocation = jsonresponse["location"];
return true;
});
}
});
}
/* Fetches tasks from servlet by location and category */
function fetchTasks(category, miles) {
let url = "/tasks?lat=" + userLocation.lat + "&lng=" + userLocation.lng + "&miles=" + miles;
if (category !== undefined && category != "all") {
url += "&category=" + category;
}
return fetch(url).then(response => response.json());
}
/* Displays the tasks received from the server response */
function displayTasks(response) {
  // If a response is passed, the taskPagesCache is updated along with the next and prev page buttons
if (response !== undefined) {
taskPagesCache = response;
displayPaginationUI();
}
if (taskPagesCache !== null && taskPagesCache.taskCount > 0) {
document.getElementById("no-tasks-message").style.display = "none";
document.getElementById("tasks-message").style.display = "block";
document.getElementById("tasks-list").innerHTML = taskPagesCache.taskPages[currentPage - 1];
document.getElementById("tasks-list").style.display = "block";
addTasksClickHandlers();
} else {
document.getElementById("no-tasks-message").style.display = "block";
document.getElementById("tasks-message").style.display = "none";
document.getElementById("tasks-list").style.display = "none";
}
}
function displayPaginationUI() {
let nextPageButton = document.getElementById("next-page");
let prevPageButton = document.getElementById("prev-page");
if (taskPagesCache.pageCount > 1) {
nextPageButton.style.cursor = "pointer";
nextPageButton.removeAttribute("title");
} else {
nextPageButton.style.cursor = "not-allowed";
nextPageButton.setAttribute("title", "You are already on the last page");
}
  // If displayTasks is called and the result has fewer pages than the page the user was last on, the currentPage will get reset to 1
if (currentPage > taskPagesCache.pageCount) {
currentPage = 1;
}
if (currentPage > 1) {
prevPageButton.style.cursor = "pointer";
prevPageButton.removeAttribute("title");
} else {
prevPageButton.style.cursor = "not-allowed";
prevPageButton.setAttribute("title", "You are already on the first page");
}
let pageNumbersWrapper = document.getElementById("page-numbers-wrapper");
pageNumbersWrapper.innerHTML = "";
if (taskPagesCache.pageCount <= 5 || (window.innerWidth >= 375)) {
for (let i = 1; i <= taskPagesCache.pageCount; i++) {
let pageNumber = document.createElement("a");
pageNumber.classList.add("page-number");
if (i === currentPage) pageNumber.setAttribute("id", "current-page");
pageNumber.innerText = i;
pageNumber.addEventListener("click", function() {
currentPageNumberNode.removeAttribute("id");
currentPageNumberNode = pageNumber;
currentPage = i;
pageNumber.setAttribute("id", "current-page");
displayTasks();
});
pageNumbersWrapper.appendChild(pageNumber);
}
} else {
let pageNumberSpacing = document.createElement("div");
pageNumberSpacing.classList.add("page-number-spacing");
pageNumberSpacing.innerText = "...";
if (currentPage !== 1) {
let firstPageNumber = document.createElement("a");
firstPageNumber.classList.add("page-number");
firstPageNumber.innerText = 1;
firstPageNumber.addEventListener("click", function() {
currentPageNumberNode.removeAttribute("id");
currentPageNumberNode = pageNumber;
currentPage = 1;
pageNumber.setAttribute("id", "current-page");
displayTasks();
});
pageNumbersWrapper.appendChild(firstPageNumber);
pageNumbersWrapper.appendChild(pageNumberSpacing);
}
let currentPageNumber = document.createElement("a");
currentPageNumber.classList.add("page-number");
currentPageNumber.setAttribute("id", "current-page");
currentPageNumber.innerText = currentPage;
pageNumbersWrapper.appendChild(currentPageNumber);
if (currentPage != taskPagesCache.pageCount) {
pageNumbersWrapper.appendChild(pageNumberSpacing.cloneNode(true));
let lastPageNumber = document.createElement("a");
lastPageNumber.classList.add("page-number");
lastPageNumber.innerText = taskPagesCache.pageCount;
lastPageNumber.addEventListener("click", function() {
currentPageNumberNode.removeAttribute("id");
currentPageNumberNode = lastPageNumber;
lastPageNumber.setAttribute("id", "current-page");
currentPage = taskPagesCache.pageCount;
displayTasks();
});
pageNumbersWrapper.appendChild(lastPageNumber);
}
}
currentPageNumberNode = document.getElementById("current-page");
}
/* Function adds all the necessary tasks 'click' event listeners*/
function addTasksClickHandlers() {
// adds confirmHelp click event listener to confirm help buttons
const confirmHelpButtons = document.getElementsByClassName("confirm-help");
for (let i = 0; i < confirmHelpButtons.length; i++){
confirmHelpButtons[i].addEventListener("click", function(e) {
confirmHelp(e.target);
});
}
// adds exitHelp click event listener to exit help buttons
const exitHelpButtons = document.getElementsByClassName("exit-help");
for (let i = 0; i < exitHelpButtons.length; i++) {
exitHelpButtons[i].addEventListener("click", function(e) {
exitHelp(e.target);
});
}
// adds helpOut click event listener to help out buttons
const helpOutButtons = document.getElementsByClassName("help-out");
for (let i = 0; i < helpOutButtons.length; i++) {
if (!helpOutButtons[i].classList.contains("disable-help")) {
helpOutButtons[i].addEventListener("click", function(e) {
helpOut(e.target);
});
}
}
}
/* Helper function that determines if the current user's location is known */
function userLocationIsKnown() {
return (userLocation !== null);
}
| add helper functions to avoid repetitive code and add resize event listener
| src/main/webapp/homepage_script.js | add helper functions to avoid repetitive code and add resize event listener | <ide><path>rc/main/webapp/homepage_script.js
<ide> let taskPagesCache = null;
<ide> let currentPageNumberNode = null;
<ide>
<add>window.addEventListener("resize", displayPaginationUI);
<add>
<ide> //window.onscroll = stickyControlBar;
<ide>
<ide> /* Scroll function so that the control bar sticks to the top of the page */
<ide> // adds distance radius change event
<ide> document.getElementById("distance-radius").addEventListener("change", function(e) {
<ide> fetchTasks(currentCategory, e.target.value)
<del> .then(response => displayTasks(response));
<add> .then(response => {
<add> displayTasks(response);
<add> displayPaginationUI();
<add> });
<ide> });
<ide>
<ide> // adds nextPage and prevPage click events
<ide> if (currentPage > 1) {
<ide> currentPage--;
<ide> displayTasks();
<del> if (currentPage === 1) {
<del> let prevPageButton = document.getElementById("prev-page");
<del> prevPageButton.style.cursor = "not-allowed";
<del> prevPageButton.setAttribute("title", "You are already on the first page");
<del> }
<del> if (currentPage < taskPagesCache.pageCount) {
<del> let nextPageButton = document.getElementById("next-page");
<del> nextPageButton.style.cursor = "pointer";
<del> nextPageButton.removeAttribute("title");
<del> }
<ide> displayPaginationUI();
<ide> }
<ide> }
<ide> if (currentPage < taskPagesCache.pageCount) {
<ide> currentPage++;
<ide> displayTasks();
<del> document.getElementById("prev-page").style.cursor = "pointer";
<del> document.getElementById("prev-page").removeAttribute("title");
<del> if (currentPage === taskPagesCache.pageCount) {
<del> let nextPageButton = document.getElementById("next-page");
<del> nextPageButton.style.cursor = "not-allowed";
<del> nextPageButton.setAttribute("title", "You are already on the last page");
<del> }
<ide> displayPaginationUI();
<ide> }
<ide> }
<ide> // only fetches tasks if user's location has been retrieved
<ide> if (userLocationIsKnown()) {
<ide> fetchTasks(category, currentMiles)
<del> .then(response => displayTasks(response));
<add> .then(response => {
<add> displayTasks(response);
<add> displayPaginationUI();
<add> });
<ide> }
<ide> // Unhighlights and resets styling for all category buttons
<ide> const categoryButtons = document.getElementsByClassName("categories");
<ide> }
<ide> // fetches tasks again if user's current location was successfully retrieved and stored
<ide> else if (userLocationIsKnown()) {
<del> fetchTasks(currentCategory, currentMiles).then(response => displayTasks(response));
<add> fetchTasks(currentCategory, currentMiles).then(response => {
<add> displayTasks(response);
<add> displayPaginationUI();
<add> });
<ide> }
<ide> });
<ide> }
<ide> // fetchTasks and displayTasks
<ide> window.initialize = function () {
<ide> getUserLocation().then(() => fetchTasks(currentCategory, currentMiles))
<del> .then(jsonresponse => displayTasks(jsonresponse))
<add> .then(response => {
<add> displayTasks(response);
<add> displayPaginationUI();
<add> })
<ide> .catch(() => {
<ide> console.error("User location could not be retrieved");
<ide> document.getElementById("location-missing-message").style.display = "block";
<ide> });
<ide> }
<ide>
<add>/* Function used as a fallback to retrieve the user's location by IP address */
<ide> function locationByIPSuccesful() {
<ide> let url = "https://www.googleapis.com/geolocation/v1/geolocate?key=" + MAPSKEY;
<ide> const request = new Request(url, {method: "POST"});
<ide>
<ide> /* Displays the tasks received from the server response */
<ide> function displayTasks(response) {
<del>
<ide> // If a response is passed, the taskPagesCache is updated along with the next and prev page buttons
<ide> if (response !== undefined) {
<ide> taskPagesCache = response;
<del> displayPaginationUI();
<add> // If displayTasks is called and the result has fewer pages than the page the user was last on, the currentPage will get reset to 1
<add> if (currentPage > taskPagesCache.pageCount) {
<add> currentPage = 1;
<add> }
<ide> }
<ide> if (taskPagesCache !== null && taskPagesCache.taskCount > 0) {
<ide> document.getElementById("no-tasks-message").style.display = "none";
<ide> }
<ide> }
<ide>
<add>/** Function loads and displays all the pagination UI components (page numbers and buttons) */
<ide> function displayPaginationUI() {
<del> let nextPageButton = document.getElementById("next-page");
<del> let prevPageButton = document.getElementById("prev-page");
<del> if (taskPagesCache.pageCount > 1) {
<del> nextPageButton.style.cursor = "pointer";
<del> nextPageButton.removeAttribute("title");
<del> } else {
<del> nextPageButton.style.cursor = "not-allowed";
<del> nextPageButton.setAttribute("title", "You are already on the last page");
<del> }
<del> // If displayTasks is called and the result has fewer pages than the page the user was last on, the currentPage will get reset to 1
<del> if (currentPage > taskPagesCache.pageCount) {
<del> currentPage = 1;
<del> }
<del> if (currentPage > 1) {
<del> prevPageButton.style.cursor = "pointer";
<del> prevPageButton.removeAttribute("title");
<del> } else {
<del> prevPageButton.style.cursor = "not-allowed";
<del> prevPageButton.setAttribute("title", "You are already on the first page");
<del> }
<del>
<add> updatePageButtonStyling();
<ide> let pageNumbersWrapper = document.getElementById("page-numbers-wrapper");
<ide> pageNumbersWrapper.innerHTML = "";
<del> if (taskPagesCache.pageCount <= 5 || (window.innerWidth >= 375)) {
<del> for (let i = 1; i <= taskPagesCache.pageCount; i++) {
<del> let pageNumber = document.createElement("a");
<del> pageNumber.classList.add("page-number");
<del> if (i === currentPage) pageNumber.setAttribute("id", "current-page");
<del> pageNumber.innerText = i;
<del> pageNumber.addEventListener("click", function() {
<del> currentPageNumberNode.removeAttribute("id");
<del> currentPageNumberNode = pageNumber;
<del> currentPage = i;
<del> pageNumber.setAttribute("id", "current-page");
<del> displayTasks();
<del> });
<del> pageNumbersWrapper.appendChild(pageNumber);
<del> }
<del> } else {
<add>
<add> // Page numbers displayed for smaller screens with several pages should
<add> // show only the first, current, and last page
<add> if (taskPagesCache.pageCount >= 5 && (window.innerWidth < 375)) {
<ide> let pageNumberSpacing = document.createElement("div");
<ide> pageNumberSpacing.classList.add("page-number-spacing");
<ide> pageNumberSpacing.innerText = "...";
<ide>
<add> // Only displays first page if the current page isn't already the first page
<ide> if (currentPage !== 1) {
<del> let firstPageNumber = document.createElement("a");
<del> firstPageNumber.classList.add("page-number");
<del> firstPageNumber.innerText = 1;
<del> firstPageNumber.addEventListener("click", function() {
<del> currentPageNumberNode.removeAttribute("id");
<del> currentPageNumberNode = pageNumber;
<del> currentPage = 1;
<del> pageNumber.setAttribute("id", "current-page");
<del> displayTasks();
<del> });
<add> let firstPageNumber = createPageNumberElement(1);
<ide> pageNumbersWrapper.appendChild(firstPageNumber);
<ide> pageNumbersWrapper.appendChild(pageNumberSpacing);
<ide> }
<ide>
<del> let currentPageNumber = document.createElement("a");
<del> currentPageNumber.classList.add("page-number");
<del> currentPageNumber.setAttribute("id", "current-page");
<del> currentPageNumber.innerText = currentPage;
<add> let currentPageNumber = createPageNumberElement(currentPage);
<ide> pageNumbersWrapper.appendChild(currentPageNumber);
<ide>
<add> // Only displays last page if the current page isn't already the last page
<ide> if (currentPage != taskPagesCache.pageCount) {
<ide> pageNumbersWrapper.appendChild(pageNumberSpacing.cloneNode(true));
<del> let lastPageNumber = document.createElement("a");
<del> lastPageNumber.classList.add("page-number");
<del> lastPageNumber.innerText = taskPagesCache.pageCount;
<del> lastPageNumber.addEventListener("click", function() {
<del> currentPageNumberNode.removeAttribute("id");
<del> currentPageNumberNode = lastPageNumber;
<del> lastPageNumber.setAttribute("id", "current-page");
<del> currentPage = taskPagesCache.pageCount;
<del> displayTasks();
<del> });
<add> let lastPageNumber = createPageNumberElement(taskPagesCache.pageCount);
<ide> pageNumbersWrapper.appendChild(lastPageNumber);
<ide> }
<add>
<add> // Displays all page numbers if less than 5 pages or if the screen is big enough
<add> } else {
<add> for (let i = 1; i <= taskPagesCache.pageCount; i++) {
<add> let pageNumber = createPageNumberElement(i);
<add> pageNumbersWrapper.appendChild(pageNumber);
<add> }
<ide> }
<ide> currentPageNumberNode = document.getElementById("current-page");
<add>}
<add>
<add>/** Helper function that creates a page number element when provided with the page number */
<add>function createPageNumberElement(number) {
<add> let pageNumber = document.createElement("a");
<add> pageNumber.classList.add("page-number");
<add> pageNumber.innerText = number;
<add> if (currentPage === number) pageNumber.setAttribute("id", "current-page");
<add> pageNumber.addEventListener("click", function() {
<add> currentPageNumberNode.removeAttribute("id");
<add> currentPageNumberNode = pageNumber;
<add> currentPage = number;
<add> pageNumber.setAttribute("id", "current-page");
<add> displayPaginationUI();
<add> displayTasks();
<add> });
<add> return pageNumber;
<add>}
<add>
<add>/** Function Updates the attributes and styles of the next and prev page buttons upon page changes */
<add>function updatePageButtonStyling() {
<add> let nextPageButton = document.getElementById("next-page");
<add> let prevPageButton = document.getElementById("prev-page");
<add> if (currentPage === taskPagesCache.pageCount) {
<add> nextPageButton.style.cursor = "not-allowed";
<add> nextPageButton.setAttribute("title", "You are already on the last page");
<add> } else {
<add> nextPageButton.style.cursor = "pointer";
<add> nextPageButton.removeAttribute("title");
<add> }
<add> if (currentPage === 1) {
<add> prevPageButton.style.cursor = "not-allowed";
<add> prevPageButton.setAttribute("title", "You are already on the first page");
<add> } else {
<add> prevPageButton.style.cursor = "pointer";
<add> prevPageButton.removeAttribute("title");
<add> }
<ide> }
<ide>
<ide> /* Function adds all the necessary tasks 'click' event listeners*/ |
|
Java | apache-2.0 | 8c8e2f74cff83e68908f9af0b837cdb49b52df27 | 0 | mdamt/pdfbox,veraPDF/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,ChunghwaTelecom/pdfbox,gavanx/pdflearn,ZhenyaM/veraPDF-pdfbox,joansmith/pdfbox,torakiki/sambox,benmccann/pdfbox,ChunghwaTelecom/pdfbox,joansmith/pdfbox,mathieufortin01/pdfbox,benmccann/pdfbox,veraPDF/veraPDF-pdfbox,gavanx/pdflearn,ZhenyaM/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,mathieufortin01/pdfbox,mdamt/pdfbox,torakiki/sambox | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.encryption;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.Security;
import java.util.HashMap;
import java.util.Map;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
/**
* Manages security handlers for the application.
* It follows the singleton pattern.
* To be usable, security managers must be registered in it.
* Security managers are retrieved by the application when necessary.
*
* @author Benoit Guillon
* @author John Hewson
*/
public final class SecurityHandlerFactory
{
/** Singleton instance */
public static final SecurityHandlerFactory INSTANCE = new SecurityHandlerFactory();
static
{
Security.addProvider(new BouncyCastleProvider());
}
private final Map<String, Class<? extends SecurityHandler>> nameToHandler =
new HashMap<String, Class<? extends SecurityHandler>>();
private final Map<Class<? extends ProtectionPolicy>,
Class<? extends SecurityHandler>> policyToHandler =
new HashMap<Class<? extends ProtectionPolicy>,
Class<? extends SecurityHandler>>();
private SecurityHandlerFactory()
{
registerHandler(StandardSecurityHandler.FILTER,
StandardSecurityHandler.class,
StandardProtectionPolicy.class);
registerHandler(PublicKeySecurityHandler.FILTER,
PublicKeySecurityHandler.class,
PublicKeyProtectionPolicy.class);
}
/**
* Registers a security handler.
*
* If the security handler was already registered an exception is thrown.
* If another handler was previously registered for the same filter name or
* for the same policy name, an exception is thrown
*
* @param name the name of the filter
* @param securityHandler security handler class to register
* @param protectionPolicy protection policy class to register
*/
public void registerHandler(String name,
Class<? extends SecurityHandler> securityHandler,
Class<? extends ProtectionPolicy> protectionPolicy)
{
if (nameToHandler.containsKey(name))
{
throw new IllegalStateException("The security handler name is already registered");
}
nameToHandler.put(name, securityHandler);
policyToHandler.put(protectionPolicy, securityHandler);
}
/**
     * Returns a new security handler for the given protection policy, or null if none is available.
* @param policy the protection policy for which to create a security handler
* @return a new SecurityHandler instance, or null if none is available
*/
public SecurityHandler newSecurityHandlerForPolicy(ProtectionPolicy policy)
{
Class<? extends SecurityHandler> handlerClass = policyToHandler.get(policy.getClass());
if (handlerClass == null)
{
return null;
}
Class<?>[] argsClasses = { policy.getClass() };
Object[] args = { policy };
return newSecurityHandler(handlerClass, argsClasses, args);
}
/**
     * Returns a new security handler for the given Filter name, or null if none is available.
* @param name the Filter name from the PDF encryption dictionary
* @return a new SecurityHandler instance, or null if none is available
*/
public SecurityHandler newSecurityHandlerForFilter(String name)
{
Class<? extends SecurityHandler> handlerClass = nameToHandler.get(name);
if (handlerClass == null)
{
return null;
}
Class<?>[] argsClasses = { };
Object[] args = { };
return newSecurityHandler(handlerClass, argsClasses, args);
}
    /* Returns a new security handler for the given parameters, or null if none is available.
*
* @param handlerClass the handler class.
* @param argsClasses the parameter array.
* @param args array of objects to be passed as arguments to the constructor call.
* @return a new SecurityHandler instance, or null if none is available.
*/
private SecurityHandler newSecurityHandler(Class<? extends SecurityHandler> handlerClass,
Class<?>[] argsClasses, Object[] args)
{
try
{
Constructor<? extends SecurityHandler> ctor =
handlerClass.getDeclaredConstructor(argsClasses);
return ctor.newInstance(args);
}
catch(NoSuchMethodException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(IllegalAccessException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(InstantiationException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(InvocationTargetException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
}
}
| pdfbox/src/main/java/org/apache/pdfbox/pdmodel/encryption/SecurityHandlerFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.encryption;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.Security;
import java.util.HashMap;
import java.util.Map;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
/**
* Manages security handlers for the application.
* It follows the singleton pattern.
* To be usable, security managers must be registered in it.
* Security managers are retrieved by the application when necessary.
*
* @author Benoit Guillon
* @author John Hewson
*/
public final class SecurityHandlerFactory
{
/** Singleton instance */
public static final SecurityHandlerFactory INSTANCE = new SecurityHandlerFactory();
static
{
Security.addProvider(new BouncyCastleProvider());
}
private final Map<String, Class<? extends SecurityHandler>> nameToHandler =
new HashMap<String, Class<? extends SecurityHandler>>();
private final Map<Class<? extends ProtectionPolicy>,
Class<? extends SecurityHandler>> policyToHandler =
new HashMap<Class<? extends ProtectionPolicy>,
Class<? extends SecurityHandler>>();
private SecurityHandlerFactory()
{
registerHandler(StandardSecurityHandler.FILTER,
StandardSecurityHandler.class,
StandardProtectionPolicy.class);
registerHandler(PublicKeySecurityHandler.FILTER,
PublicKeySecurityHandler.class,
PublicKeyProtectionPolicy.class);
}
/**
* Registers a security handler.
*
* If the security handler was already registered an exception is thrown.
* If another handler was previously registered for the same filter name or
* for the same policy name, an exception is thrown
*
* @param name the name of the filter
* @param securityHandler security handler class to register
* @param protectionPolicy protection policy class to register
*/
public void registerHandler(String name,
Class<? extends SecurityHandler> securityHandler,
Class<? extends ProtectionPolicy> protectionPolicy)
{
if (nameToHandler.containsKey(name))
{
throw new IllegalStateException("The security handler name is already registered");
}
nameToHandler.put(name, securityHandler);
policyToHandler.put(protectionPolicy, securityHandler);
}
/**
* Returns a new security handler for the given protection policy, or null none is available.
* @param policy the protection policy for which to create a security handler
* @return a new SecurityHandler instance, or null if none is available
*/
public SecurityHandler newSecurityHandlerForPolicy(ProtectionPolicy policy)
{
Class<? extends SecurityHandler> handlerClass = policyToHandler.get(policy.getClass());
if (handlerClass == null)
{
return null;
}
Class<?>[] argsClasses = { policy.getClass() };
Object[] args = { policy };
return newSecurityHandler(handlerClass, argsClasses, args);
}
/**
* Returns a new security handler for the given Filter name, or null none is available.
* @param name the Filter name from the PDF encryption dictionary
* @return a new SecurityHandler instance, or null if none is available
*/
public SecurityHandler newSecurityHandlerForFilter(String name)
{
Class<? extends SecurityHandler> handlerClass = nameToHandler.get(name);
if (handlerClass == null)
{
return null;
}
Class<?>[] argsClasses = { };
Object[] args = { };
return newSecurityHandler(handlerClass, argsClasses, args);
}
/* Returns a new security handler for the given parameters, or null none is available.
*
* @param handlerClass the handler class.
* @param argsClasses the parameter array.
* @param args array of objects to be passed as arguments to the constructor call.
* @return a new SecurityHandler instance, or null if none is available.
*/
private static SecurityHandler newSecurityHandler(Class<? extends SecurityHandler> handlerClass,
Class<?>[] argsClasses, Object[] args)
{
try
{
Constructor<? extends SecurityHandler> ctor =
handlerClass.getDeclaredConstructor(argsClasses);
return ctor.newInstance(args);
}
catch(NoSuchMethodException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(IllegalAccessException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(InstantiationException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
catch(InvocationTargetException e)
{
// should not happen in normal operation
throw new RuntimeException(e);
}
}
}
| PDFBOX-2576: revert "make methods static" changes due to lack of consensus
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1672442 13f79535-47bb-0310-9956-ffa450edef68
| pdfbox/src/main/java/org/apache/pdfbox/pdmodel/encryption/SecurityHandlerFactory.java | PDFBOX-2576: revert "make methods static" changes due to lack of consensus | <ide><path>dfbox/src/main/java/org/apache/pdfbox/pdmodel/encryption/SecurityHandlerFactory.java
<ide> * @param args array of objects to be passed as arguments to the constructor call.
<ide> * @return a new SecurityHandler instance, or null if none is available.
<ide> */
<del> private static SecurityHandler newSecurityHandler(Class<? extends SecurityHandler> handlerClass,
<add> private SecurityHandler newSecurityHandler(Class<? extends SecurityHandler> handlerClass,
<ide> Class<?>[] argsClasses, Object[] args)
<ide> {
<ide> try |
|
Java | mit | c40909ef41cbc7e1ae67ed7e59a18aa50dd36b60 | 0 | conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5,conveyal/r5 | package com.conveyal.r5.labeling;
import com.conveyal.osmlib.OSMEntity;
import com.conveyal.osmlib.Way;
import com.conveyal.r5.streets.EdgeStore;
import junit.framework.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Set;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
/**
* Created by mabu on 26.11.2015.
*/
public class TraversalPermissionLabelerTest {
static TraversalPermissionLabeler traversalPermissionLabeler;
public static final EnumSet<EdgeStore.EdgeFlag> ALL = EnumSet
.of(EdgeStore.EdgeFlag.ALLOWS_BIKE, EdgeStore.EdgeFlag.ALLOWS_CAR,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR);
public static final EnumSet<EdgeStore.EdgeFlag> ALLPERMISSIONS = EnumSet
.of(EdgeStore.EdgeFlag.ALLOWS_BIKE, EdgeStore.EdgeFlag.ALLOWS_CAR,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_BIKE, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_PEDESTRIAN,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_AND_BICYCLE = EnumSet.of(
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.ALLOWS_BIKE);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_AND_CAR = EnumSet.of(
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.ALLOWS_CAR );
public static final EnumSet<EdgeStore.EdgeFlag> BICYCLE_AND_CAR = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_CAR);
public static final EnumSet<EdgeStore.EdgeFlag> NONE = EnumSet.noneOf(EdgeStore.EdgeFlag.class);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_ONLY = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN);
public static final EnumSet<EdgeStore.EdgeFlag> BICYCLE = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE);
public static final EnumSet<EdgeStore.EdgeFlag> CAR = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_CAR);
@BeforeClass
public static void setUpClass() {
traversalPermissionLabeler = new TestPermissionsLabeler();
}
@Test
public void testCyclewayPermissions() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=cycleway");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_BICYCLE);
roadFlagComparision(osmWay, "access", "destination",
EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR),
EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR));
}
@Test
public void testOnewayPermissions() {
Way osmWay = new Way();
osmWay.addTag("highway", "residential");
osmWay.addTag("oneway", "true");
osmWay.addTag("oneway:bicycle", "no");
roadFlagComparision(osmWay, ALL, PEDESTRIAN_AND_BICYCLE);
}
@Test
public void testPath() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=path;access=private");
EnumSet<EdgeStore.EdgeFlag> expectedPermissions = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
roadFlagComparision(osmWay, expectedPermissions, expectedPermissions);
}
@Test
public void testPlatform() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=platform;public_transport=platform");
roadFlagComparision(osmWay, PEDESTRIAN, PEDESTRIAN);
roadFlagComparision(osmWay, "wheelchair", "no", PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
}
@Ignore("specific tagging isn't supported yet in specific permissions")
@Test
public void testSidewalk() throws Exception {
Way osmWay = new Way();
osmWay.addTag("highway", "footway");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_BICYCLE);
//TODO: this had special permissions in OTP
osmWay = makeOSMWayFromTags("footway=sidewalk;highway=footway");
roadFlagComparision(osmWay, PEDESTRIAN, PEDESTRIAN);
}
//Sidewalks are assumed to be bidirectional so it shouldn't matter on which side of the street they are
@Test
public void testRoadWithSidewalk() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, CAR, CAR);
roadFlagComparision(osmWay, "sidewalk", "right", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "left", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "both", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "none", CAR, CAR);
roadFlagComparision(osmWay, "sidewalk", "no", CAR, CAR);
osmWay = makeOSMWayFromTags("highway=residential");
roadFlagComparision(osmWay, ALL, ALL);
//This shouldn't remove WALK permissions
roadFlagComparision(osmWay, "sidewalk", "no", ALL, ALL);
roadFlagComparision(osmWay, "sidewalk", "none", ALL, ALL);
}
@Test
public void testRoadWithBidirectionalCycleway() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, CAR, CAR);
roadFlagComparision(osmWay, "cycleway", "lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "track", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:both", "lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:both", "track", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "share_busway", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "shared_lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
}
@Test
public void testPrivateRoadWithFootBicyclePermissions() {
//Private road which can be only used as destination for motor vehicles but can be used normally for pedestrian and bicycle traffic
Way osmWay = makeOSMWayFromTags("access=private;bicycle=designated;foot=yes;highway=service;motor_vehicle=private");
EnumSet<EdgeStore.EdgeFlag> NO_THRU_CAR_PEDESTRIAN_BICYCLE = EnumSet.copyOf(PEDESTRIAN_AND_BICYCLE);
NO_THRU_CAR_PEDESTRIAN_BICYCLE.add(EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
RoadPermission roadPermission = roadFlagComparision(osmWay, NO_THRU_CAR_PEDESTRIAN_BICYCLE, NO_THRU_CAR_PEDESTRIAN_BICYCLE);
//Doesn't insert edges which don't have any permissions forward and backward
Assert.assertFalse(
Collections.disjoint(roadPermission.forward, ALLPERMISSIONS) && Collections
.disjoint(roadPermission.backward, ALLPERMISSIONS));
}
@Test
public void testSkippingRoadsWithNoPermissions() throws Exception {
Way osmWay = makeOSMWayFromTags("bicycle=no;foot=no;highway=primary;lanes=2;maxspeed=70;oneway=yes;ref=1");
RoadPermission roadPermission = roadFlagComparision(osmWay, CAR, NONE);
//Doesn't insert edges which don't have any permissions forward and backward
Assert.assertFalse(
Collections.disjoint(roadPermission.forward, ALLPERMISSIONS) && Collections
.disjoint(roadPermission.backward, ALLPERMISSIONS));
Assert.assertTrue(
Collections.disjoint(NONE, ALLPERMISSIONS) && Collections
.disjoint(NONE, ALLPERMISSIONS));
}
@Test
public void testRoadWithMonodirectionalCycleway() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, "cycleway:right", "lane", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "cycleway:right", "track", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "cycleway:left", "lane", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:left", "track", CAR, BICYCLE_AND_CAR);
osmWay = makeOSMWayFromTags("highway=residential;foot=no");
roadFlagComparision(osmWay, "bicycle:forward", "use_sidepath", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:forward", "no", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:forward", "dismount", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:backward", "use_sidepath", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "bicycle:backward", "no", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "bicycle:backward", "dismount", BICYCLE_AND_CAR, CAR);
osmWay = makeOSMWayFromTags("cycleway:right=lane;highway=residential;cycleway:left=opposite_lane;oneway=yes");
roadFlagComparision(osmWay, ALL, PEDESTRIAN_AND_BICYCLE);
roadFlagComparision(osmWay, "oneway:bicycle", "no", ALL, PEDESTRIAN_AND_BICYCLE);
osmWay = makeOSMWayFromTags("highway=tertiary;cycleway:left=lane;bicycle:forward=use_sidepath");
roadFlagComparision(osmWay, PEDESTRIAN_AND_CAR, ALL);
osmWay = makeOSMWayFromTags("highway=nobikenoped;cycleway:left=lane;bicycle:forward=use_sidepath");
roadFlagComparision(osmWay, CAR, BICYCLE_AND_CAR);
osmWay = makeOSMWayFromTags("highway=nobikenoped;foot=yes;oneway=-1;cycleway:left=opposite_lane");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_CAR);
}
@Test
public void testCyclewayNo() throws Exception {
Way osmWay = makeOSMWayFromTags("oneway=no;highway=residential;cycleway=no");
roadFlagComparision(osmWay, ALL, ALL);
}
private RoadPermission roadFlagComparision(Way osmWay, EnumSet<EdgeStore.EdgeFlag> forwardExpected,
EnumSet<EdgeStore.EdgeFlag> backwardExpected) {
return roadFlagComparision(osmWay, null, null, forwardExpected, backwardExpected);
}
/**
* Makes comparision of way with osmWay tags and newTag with newValue and compares forward and backward permissions with expected permissions
*
* Copy of osmWay is made since otherwise tags would be changed
* @param iosmWay
* @param newTag
* @param newValue
* @param forwardExpected
* @param backwardExpected
*/
private static RoadPermission roadFlagComparision(Way iosmWay, String newTag, String newValue, EnumSet<EdgeStore.EdgeFlag> forwardExpected, EnumSet<EdgeStore.EdgeFlag> backwardExpected) {
Way osmWay = new Way();
StringJoiner stringJoiner = new StringJoiner(";");
for (OSMEntity.Tag tag: iosmWay.tags) {
osmWay.addTag(tag.key, tag.value);
stringJoiner.add(tag.key+"="+tag.value);
}
if (newTag != null && newValue != null) {
osmWay.addTag(newTag, newValue);
stringJoiner.add(newTag+"="+newValue);
}
Set<EdgeStore.EdgeFlag> forwardFiltered;
Set<EdgeStore.EdgeFlag> backwardFiltered;
RoadPermission roadPermission = traversalPermissionLabeler.getPermissions(osmWay);
forwardFiltered = filterFlags(roadPermission.forward);
backwardFiltered = filterFlags(roadPermission.backward);
String tags = "Tags: " + stringJoiner.toString();
assertEquals(tags, forwardExpected, forwardFiltered);
assertEquals(tags, backwardExpected, backwardFiltered);
return roadPermission;
}
@Test
public void testSteps() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=steps");
roadFlagComparision(osmWay, PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
roadFlagComparision(osmWay, "wheelchair", "yes", PEDESTRIAN, PEDESTRIAN);
roadFlagComparision(osmWay, "wheelchair", "limited", PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
roadFlagComparision(osmWay, "ramp:wheelchair", "yes", PEDESTRIAN, PEDESTRIAN);
}
@Test
public void testSidepath() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=tertiary;bicycle=use_sidepath");
roadFlagComparision(osmWay, PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
}
@Test
public void testSpecificPermission() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=primary;bicycle=use_sidepath;foot=no;junction=roundabout");
roadFlagComparision(osmWay, CAR, NONE);
}
/**
* Removes all flags except permissions
* @param permissions
* @return
*/
private static Set<EdgeStore.EdgeFlag> filterFlags(EnumSet<EdgeStore.EdgeFlag> permissions) {
return permissions.stream()
.filter(ALLPERMISSIONS::contains)
.collect(Collectors.toSet());
}
/**
* Creates osmway based on provided tags
*
* For example: footway=sidewalk;highway=footway
* This adds two tags footway=sidewalk and highway=footway. Order doesn't matter.
* @param tags with tags separated with ; and tag and value separated with =
* @return
*/
protected static Way makeOSMWayFromTags(String tags) {
Way osmWay = new Way();
String[] pairs = tags.split(";");
for (String pair : pairs) {
String[] kv = pair.split("=");
osmWay.addTag(kv[0], kv[1]);
}
return osmWay;
}
}
| src/test/java/com/conveyal/r5/labeling/TraversalPermissionLabelerTest.java | package com.conveyal.r5.labeling;
import com.conveyal.osmlib.OSMEntity;
import com.conveyal.osmlib.Way;
import com.conveyal.r5.streets.EdgeStore;
import junit.framework.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Set;
import java.util.StringJoiner;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
/**
* Created by mabu on 26.11.2015.
*/
public class TraversalPermissionLabelerTest {
static TraversalPermissionLabeler traversalPermissionLabeler;
public static final EnumSet<EdgeStore.EdgeFlag> ALL = EnumSet
.of(EdgeStore.EdgeFlag.ALLOWS_BIKE, EdgeStore.EdgeFlag.ALLOWS_CAR,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR);
public static final EnumSet<EdgeStore.EdgeFlag> ALLPERMISSIONS = EnumSet
.of(EdgeStore.EdgeFlag.ALLOWS_BIKE, EdgeStore.EdgeFlag.ALLOWS_CAR,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_BIKE, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_PEDESTRIAN,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_AND_BICYCLE = EnumSet.of(
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.ALLOWS_BIKE);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_AND_CAR = EnumSet.of(
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.ALLOWS_CAR );
public static final EnumSet<EdgeStore.EdgeFlag> BICYCLE_AND_CAR = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_CAR);
public static final EnumSet<EdgeStore.EdgeFlag> NONE = EnumSet.noneOf(EdgeStore.EdgeFlag.class);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR);
public static final EnumSet<EdgeStore.EdgeFlag> PEDESTRIAN_ONLY = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN);
public static final EnumSet<EdgeStore.EdgeFlag> BICYCLE = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE);
public static final EnumSet<EdgeStore.EdgeFlag> CAR = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_CAR);
@BeforeClass
public static void setUpClass() {
traversalPermissionLabeler = new TestPermissionsLabeler();
}
@Test
public void testCyclewayPermissions() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=cycleway");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_BICYCLE);
roadFlagComparision(osmWay, "access", "destination",
EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR),
EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR, EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR));
}
@Test
public void testOnewayPermissions() {
Way osmWay = new Way();
osmWay.addTag("highway", "residential");
osmWay.addTag("oneway", "true");
osmWay.addTag("oneway:bicycle", "no");
roadFlagComparision(osmWay, ALL, PEDESTRIAN_AND_BICYCLE);
}
@Test
public void testPath() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=path;access=private");
EnumSet<EdgeStore.EdgeFlag> expectedPermissions = EnumSet.of(EdgeStore.EdgeFlag.ALLOWS_BIKE,
EdgeStore.EdgeFlag.ALLOWS_PEDESTRIAN, EdgeStore.EdgeFlag.ALLOWS_WHEELCHAIR,
EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
roadFlagComparision(osmWay, expectedPermissions, expectedPermissions);
}
@Test
public void testPlatform() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=platform;public_transport=platform");
roadFlagComparision(osmWay, PEDESTRIAN, PEDESTRIAN);
roadFlagComparision(osmWay, "wheelchair", "no", PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
}
@Ignore("specific tagging isn't supported yet in specific permissions")
@Test
public void testSidewalk() throws Exception {
Way osmWay = new Way();
osmWay.addTag("highway", "footway");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_BICYCLE);
//TODO: this had special permissions in OTP
osmWay = makeOSMWayFromTags("footway=sidewalk;highway=footway");
roadFlagComparision(osmWay, PEDESTRIAN, PEDESTRIAN);
}
//Sidewalks are assumed to be bidirectional so it shouldn't matter on which side of the street they are
@Test
public void testRoadWithSidewalk() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, CAR, CAR);
roadFlagComparision(osmWay, "sidewalk", "right", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "left", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "both", PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
roadFlagComparision(osmWay, "sidewalk", "none", CAR, CAR);
roadFlagComparision(osmWay, "sidewalk", "no", CAR, CAR);
osmWay = makeOSMWayFromTags("highway=residential");
roadFlagComparision(osmWay, ALL, ALL);
//This shouldn't remove WALK permissions
roadFlagComparision(osmWay, "sidewalk", "no", ALL, ALL);
roadFlagComparision(osmWay, "sidewalk", "none", ALL, ALL);
}
@Test
public void testRoadWithBidirectionalCycleway() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, CAR, CAR);
roadFlagComparision(osmWay, "cycleway", "lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "track", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:both", "lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:both", "track", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "share_busway", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway", "shared_lane", BICYCLE_AND_CAR, BICYCLE_AND_CAR);
}
@Test
public void testSkippingRoadsWithNoPermissions() throws Exception {
Way osmWay = makeOSMWayFromTags("bicycle=no;foot=no;highway=primary;lanes=2;maxspeed=70;oneway=yes;ref=1");
RoadPermission roadPermission = roadFlagComparision(osmWay, CAR, NONE);
//Doesn't insert edges which don't have any permissions forward and backward
Assert.assertFalse(
Collections.disjoint(roadPermission.forward, ALLPERMISSIONS) && Collections
.disjoint(roadPermission.backward, ALLPERMISSIONS));
Assert.assertTrue(
Collections.disjoint(NONE, ALLPERMISSIONS) && Collections
.disjoint(NONE, ALLPERMISSIONS));
}
@Test
public void testRoadWithMonodirectionalCycleway() {
Way osmWay = makeOSMWayFromTags("highway=nobikenoped");
roadFlagComparision(osmWay, "cycleway:right", "lane", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "cycleway:right", "track", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "cycleway:left", "lane", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "cycleway:left", "track", CAR, BICYCLE_AND_CAR);
osmWay = makeOSMWayFromTags("highway=residential;foot=no");
roadFlagComparision(osmWay, "bicycle:forward", "use_sidepath", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:forward", "no", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:forward", "dismount", CAR, BICYCLE_AND_CAR);
roadFlagComparision(osmWay, "bicycle:backward", "use_sidepath", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "bicycle:backward", "no", BICYCLE_AND_CAR, CAR);
roadFlagComparision(osmWay, "bicycle:backward", "dismount", BICYCLE_AND_CAR, CAR);
osmWay = makeOSMWayFromTags("cycleway:right=lane;highway=residential;cycleway:left=opposite_lane;oneway=yes");
roadFlagComparision(osmWay, ALL, PEDESTRIAN_AND_BICYCLE);
roadFlagComparision(osmWay, "oneway:bicycle", "no", ALL, PEDESTRIAN_AND_BICYCLE);
osmWay = makeOSMWayFromTags("highway=tertiary;cycleway:left=lane;bicycle:forward=use_sidepath");
roadFlagComparision(osmWay, PEDESTRIAN_AND_CAR, ALL);
osmWay = makeOSMWayFromTags("highway=nobikenoped;cycleway:left=lane;bicycle:forward=use_sidepath");
roadFlagComparision(osmWay, CAR, BICYCLE_AND_CAR);
osmWay = makeOSMWayFromTags("highway=nobikenoped;foot=yes;oneway=-1;cycleway:left=opposite_lane");
roadFlagComparision(osmWay, PEDESTRIAN_AND_BICYCLE, PEDESTRIAN_AND_CAR);
}
@Test
public void testCyclewayNo() throws Exception {
Way osmWay = makeOSMWayFromTags("oneway=no;highway=residential;cycleway=no");
roadFlagComparision(osmWay, ALL, ALL);
}
private RoadPermission roadFlagComparision(Way osmWay, EnumSet<EdgeStore.EdgeFlag> forwardExpected,
EnumSet<EdgeStore.EdgeFlag> backwardExpected) {
return roadFlagComparision(osmWay, null, null, forwardExpected, backwardExpected);
}
/**
* Makes comparision of way with osmWay tags and newTag with newValue and compares forward and backward permissions with expected permissions
*
* Copy of osmWay is made since otherwise tags would be changed
* @param iosmWay
* @param newTag
* @param newValue
* @param forwardExpected
* @param backwardExpected
*/
private static RoadPermission roadFlagComparision(Way iosmWay, String newTag, String newValue, EnumSet<EdgeStore.EdgeFlag> forwardExpected, EnumSet<EdgeStore.EdgeFlag> backwardExpected) {
Way osmWay = new Way();
StringJoiner stringJoiner = new StringJoiner(";");
for (OSMEntity.Tag tag: iosmWay.tags) {
osmWay.addTag(tag.key, tag.value);
stringJoiner.add(tag.key+"="+tag.value);
}
if (newTag != null && newValue != null) {
osmWay.addTag(newTag, newValue);
stringJoiner.add(newTag+"="+newValue);
}
Set<EdgeStore.EdgeFlag> forwardFiltered;
Set<EdgeStore.EdgeFlag> backwardFiltered;
RoadPermission roadPermission = traversalPermissionLabeler.getPermissions(osmWay);
forwardFiltered = filterFlags(roadPermission.forward);
backwardFiltered = filterFlags(roadPermission.backward);
String tags = "Tags: " + stringJoiner.toString();
assertEquals(tags, forwardExpected, forwardFiltered);
assertEquals(tags, backwardExpected, backwardFiltered);
return roadPermission;
}
@Test
public void testSteps() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=steps");
roadFlagComparision(osmWay, PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
roadFlagComparision(osmWay, "wheelchair", "yes", PEDESTRIAN, PEDESTRIAN);
roadFlagComparision(osmWay, "wheelchair", "limited", PEDESTRIAN_ONLY, PEDESTRIAN_ONLY);
roadFlagComparision(osmWay, "ramp:wheelchair", "yes", PEDESTRIAN, PEDESTRIAN);
}
@Test
public void testSidepath() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=tertiary;bicycle=use_sidepath");
roadFlagComparision(osmWay, PEDESTRIAN_AND_CAR, PEDESTRIAN_AND_CAR);
}
@Test
public void testSpecificPermission() throws Exception {
Way osmWay = makeOSMWayFromTags("highway=primary;bicycle=use_sidepath;foot=no;junction=roundabout");
roadFlagComparision(osmWay, CAR, NONE);
}
/**
* Removes all flags except permissions
* @param permissions
* @return
*/
private static Set<EdgeStore.EdgeFlag> filterFlags(EnumSet<EdgeStore.EdgeFlag> permissions) {
return permissions.stream()
.filter(ALLPERMISSIONS::contains)
.collect(Collectors.toSet());
}
/**
* Creates osmway based on provided tags
*
* For example: footway=sidewalk;highway=footway
* This adds two tags footway=sidewalk and highway=footway. Order doesn't matter.
* @param tags with tags separated with ; and tag and value separated with =
* @return
*/
protected static Way makeOSMWayFromTags(String tags) {
Way osmWay = new Way();
String[] pairs = tags.split(";");
for (String pair : pairs) {
String[] kv = pair.split("=");
osmWay.addTag(kv[0], kv[1]);
}
return osmWay;
}
}
| Add new test for specificPermissions
Car destination but normally traversable for pedestrian and bicycle
| src/test/java/com/conveyal/r5/labeling/TraversalPermissionLabelerTest.java | Add new test for specificPermissions | <ide><path>rc/test/java/com/conveyal/r5/labeling/TraversalPermissionLabelerTest.java
<ide> }
<ide>
<ide> @Test
<add> public void testPrivateRoadWithFootBicyclePermissions() {
<add> //Private road which can be only used as destination for motor vehicles but can be used normally for pedestrian and bicycle traffic
<add> Way osmWay = makeOSMWayFromTags("access=private;bicycle=designated;foot=yes;highway=service;motor_vehicle=private");
<add>
<add> EnumSet<EdgeStore.EdgeFlag> NO_THRU_CAR_PEDESTRIAN_BICYCLE = EnumSet.copyOf(PEDESTRIAN_AND_BICYCLE);
<add> NO_THRU_CAR_PEDESTRIAN_BICYCLE.add(EdgeStore.EdgeFlag.NO_THRU_TRAFFIC_CAR);
<add>
<add>
<add> RoadPermission roadPermission = roadFlagComparision(osmWay, NO_THRU_CAR_PEDESTRIAN_BICYCLE, NO_THRU_CAR_PEDESTRIAN_BICYCLE);
<add>
<add> //Doesn't insert edges which don't have any permissions forward and backward
<add> Assert.assertFalse(
<add> Collections.disjoint(roadPermission.forward, ALLPERMISSIONS) && Collections
<add> .disjoint(roadPermission.backward, ALLPERMISSIONS));
<add> }
<add>
<add> @Test
<ide> public void testSkippingRoadsWithNoPermissions() throws Exception {
<ide> Way osmWay = makeOSMWayFromTags("bicycle=no;foot=no;highway=primary;lanes=2;maxspeed=70;oneway=yes;ref=1");
<ide> RoadPermission roadPermission = roadFlagComparision(osmWay, CAR, NONE); |
|
JavaScript | mit | 1389229cb471f7a13c61660562507aedb3543b84 | 0 | tpoikela/battles,tpoikela/battles,tpoikela/battles,tpoikela/battles,tpoikela/battles |
const RG = require('./rg');
const ROT = require('../../lib/rot');
/* A OO wrapper around ROT.RNG. Adds method for serialisation. */
RG.Random = function() {
this.seed = 0;
this.rng = ROT.RNG.clone();
this.rng.setSeed(this.seed);
};
RG.Random.prototype.setSeed = function(seed) {
this.rng.setSeed(seed);
};
/* Return random property from the object.*/
RG.Random.prototype.randProp = function(obj) {
const keys = Object.keys(obj);
const keyIndex = this.randIndex(keys);
return obj[keys[keyIndex]];
};
/* Returns a random entry from the array.*/
RG.Random.prototype.arrayGetRand = function(arr) {
const randIndex = this.randIndex(arr);
return arr[randIndex];
};
RG.Random.prototype.getUniformInt = function(min, max) {
return this.rng.getUniformInt(min, max);
};
/* Returns a random index number from given array. */
RG.Random.prototype.randIndex = function randIndex(arr) {
return Math.floor(this.rng.getUniform() * arr.length);
};
RG.Random.prototype.getUniform = function() {
return this.rng.getUniform();
};
RG.Random.prototype.getNormal = function(mean, stddev) {
return this.rng.getNormal(mean, stddev);
};
/* Given a number N, returns an integer from 0 to N weighted such that N has the
* highest weight, and 0 the lowest.
*/
RG.Random.prototype.getWeightedLinear = function(N) {
const weights = {};
for (let i = 0; i < N; i++) {
weights[i] = i + 1; // Without + 1, 0 will never be chosen
}
return this.rng.getWeightedValue(weights);
};
RG.Random.prototype.toJSON = function() {
return {
seed: this.seed,
state: this.rng.getState()
};
};
RG.RAND = new RG.Random();
module.exports = RG.Random;
| client/src/random.js |
const RG = require('./rg');
const ROT = require('../../lib/rot');
/* A OO wrapper around ROT.RNG. Adds method for serialisation. */
RG.Random = function() {
this.seed = 0;
this.rng = ROT.RNG.clone();
this.rng.setSeed(this.seed);
};
RG.Random.prototype.setSeed = function(seed) {
this.rng.setSeed(seed);
};
/* Return random property from the object.*/
RG.Random.prototype.randProp = function(obj) {
const keys = Object.keys(obj);
const keyIndex = this.randIndex(keys);
return obj[keys[keyIndex]];
};
/* Returns a random entry from the array.*/
RG.Random.prototype.arrayGetRand = function(arr) {
const randIndex = this.randIndex(arr);
return arr[randIndex];
};
RG.Random.prototype.getUniformInt = function(min, max) {
return this.rng.getUniformInt(min, max);
};
/* Returns a random index number from given array. */
RG.Random.prototype.randIndex = function randIndex(arr) {
return Math.floor(this.rng.getUniform() * arr.length);
};
RG.Random.prototype.getUniform = function() {
return this.rng.getUniform();
};
RG.Random.prototype.getNormal = function() {
return this.rng.getNormal();
};
/* Given a number N, returns an integer from 0 to N weighted such that N has the
* highest weight, and 0 the lowest.
*/
RG.Random.prototype.getWeightedLinear = function(N) {
};
RG.Random.prototype.toJSON = function() {
return {
seed: this.seed,
state: this.rng.getState()
};
};
RG.RAND = new RG.Random();
module.exports = RG.Random;
| Added getLinearWeighted() and added mean,stddev args to getNormal().
| client/src/random.js | Added getLinearWeighted() and added mean,stddev args to getNormal(). | <ide><path>lient/src/random.js
<ide> return this.rng.getUniform();
<ide> };
<ide>
<del>RG.Random.prototype.getNormal = function() {
<del> return this.rng.getNormal();
<add>RG.Random.prototype.getNormal = function(mean, stddev) {
<add> return this.rng.getNormal(mean, stddev);
<ide> };
<ide>
<ide> /* Given a number N, returns an integer from 0 to N weighted such that N has the
<ide> * highest weight, and 0 the lowest.
<ide> */
<ide> RG.Random.prototype.getWeightedLinear = function(N) {
<del>
<add> const weights = {};
<add> for (let i = 0; i < N; i++) {
<add> weights[i] = i + 1; // Without + 1, 0 will never be chosen
<add> }
<add> return this.rng.getWeightedValue(weights);
<ide> };
<ide>
<ide> RG.Random.prototype.toJSON = function() { |
|
JavaScript | mit | ded47c09f2820d93079bec55096eab608b2462d2 | 0 | BreemsEmporiumMensToiletriesFragrances/phosphorus,AmericanSundown/phosphorus,daukantas/phosphorus,nathan/phosphorus,nathan/phosphorus,AmericanSundown/phosphorus,nitrodragon/royroyroyroy,trumank/phosphorus,phosphorus/phosphorus.github.io,BreemsEmporiumMensToiletriesFragrances/phosphorus,phosphorus/phosphorus.github.io,daukantas/phosphorus,trumank/phosphorus,nitrodragon/royroyroyroy | var P = (function() {
'use strict';
var hasOwnProperty = {}.hasOwnProperty;
var hasTouchEvents = 'ontouchstart' in document;
var inherits = function(cla, sup) {
cla.prototype = Object.create(sup.prototype);
cla.parent = sup;
cla.base = function(self, method /*, args... */) {
return sup.prototype[method].call(self, [].slice.call(arguments, 2));
};
};
var addEvents = function(cla /*, events... */) {
[].slice.call(arguments, 1).forEach(function(event) {
addEvent(cla, event);
});
};
var addEvent = function(cla, event) {
var capital = event[0].toUpperCase() + event.substr(1);
cla.prototype.addEventListener = cla.prototype.addEventListener || function(event, listener) {
var listeners = this['$' + event] = this['$' + event] || [];
listeners.push(listener);
return this;
};
cla.prototype.removeEventListener = cla.prototype.removeEventListener || function(event, listener) {
var listeners = this['$' + event];
if (listeners) {
var i = listeners.indexOf(listener);
if (i > -1) {
listeners.splice(i, 1);
}
}
return this;
};
cla.prototype.dispatchEvent = cla.prototype.dispatchEvent || function(event, arg) {
var listeners = this['$' + event];
if (listeners) {
listeners.forEach(function(listener) {
listener(arg);
});
}
var listener = this['on' + event];
if (listener) {
listener(arg);
}
return this;
};
cla.prototype['on' + capital] = function(listener) {
this.addEventListener(event, listener);
return this;
};
cla.prototype['dispatch' + capital] = function(arg) {
this.dispatchEvent(event, arg);
return this;
};
};
var Request = function() {
this.loaded = 0;
};
addEvents(Request, 'load', 'progress', 'error');
Request.prototype.progress = function(loaded, total, lengthComputable) {
this.loaded = loaded;
this.total = total;
this.lengthComputable = lengthComputable;
this.dispatchProgress({
loaded: loaded,
total: total,
lengthComputable: lengthComputable
});
};
Request.prototype.load = function(result) {
this.result = result;
this.isDone = true;
this.dispatchLoad(result);
};
Request.prototype.error = function(error) {
this.result = error;
this.isError = true;
this.isDone = true;
this.dispatchError(error);
};
var CompositeRequest = function() {
this.requests = [];
this.isDone = true;
this.update = this.update.bind(this);
this.error = this.error.bind(this);
};
inherits(CompositeRequest, Request);
CompositeRequest.prototype.add = function(request) {
if (request instanceof CompositeRequest) {
for (var i = 0; i < request.requests.length; i++) {
this.add(request.requests[i]);
}
} else {
this.requests.push(request);
request.addEventListener('progress', this.update);
request.addEventListener('load', this.update);
request.addEventListener('error', this.error);
this.update();
}
};
CompositeRequest.prototype.update = function() {
if (this.isError) return;
var requests = this.requests;
var i = requests.length;
var total = 0;
var loaded = 0;
var lengthComputable = true;
var uncomputable = 0;
var done = 0;
while (i--) {
var r = requests[i];
loaded += r.loaded;
if (r.isDone) {
total += r.loaded;
done += 1;
} else if (r.lengthComputable) {
total += r.total;
} else {
lengthComputable = false;
uncomputable += 1;
}
}
if (!lengthComputable && uncomputable !== requests.length) {
var each = total / (requests.length - uncomputable) * uncomputable;
i = requests.length;
total = 0;
loaded = 0;
lengthComputable = true;
while (i--) {
var r = requests[i];
if (r.lengthComputable) {
loaded += r.loaded;
total += r.total;
} else {
total += each;
if (r.isDone) loaded += each;
}
}
}
this.progress(loaded, total, lengthComputable);
this.doneCount = done;
this.isDone = done === requests.length;
if (this.isDone && !this.defer) {
this.load(this.getResult());
}
};
CompositeRequest.prototype.getResult = function() {
throw new Error('Users must implement getResult()');
};
var IO = {};
IO.BASE_URL = 'http://scratch.mit.edu/internalapi/'
IO.PROJECT_URL = IO.BASE_URL + 'project/';
IO.ASSET_URL = IO.BASE_URL + 'asset/';
IO.PROXY_URL = 'proxy.php?u=';
IO.init = function(request) {
IO.projectRequest = request;
IO.zip = null;
IO.costumes = null;
IO.images = null;
};
IO.load = function(url, callback, self) {
var request = new Request;
var xhr = new XMLHttpRequest;
xhr.open('GET', IO.PROXY_URL + encodeURIComponent(url), true);
xhr.onprogress = function(e) {
request.progress(e.loaded, e.total, e.lengthComputable);
};
xhr.onload = function() {
if (xhr.status === 200) {
request.load(xhr.responseText);
} else {
request.error(new Error('HTTP ' + xhr.status + ': ' + xhr.statusText));
}
};
xhr.onerror = function() {
request.error(new Error('XHR Error'));
};
setTimeout(xhr.send.bind(xhr));
if (callback) request.onLoad(callback.bind(self));
return request;
};
IO.loadImage = function(url, callback, self) {
var request = new Request;
var image = new Image;
image.src = url;
image.onload = function() {
request.load(image);
};
image.onerror = function() {
request.error(new Error('Failed to load image'));
};
// var xhr = new XMLHttpRequest;
// xhr.open('GET', IO.PROXY_URL + encodeURIComponent(url), true);
// xhr.responseType = 'blob';
// xhr.onprogress = function(e) {
// request.progress(e.loaded, e.total, e.lengthComputable);
// };
// xhr.onload = function(e) {
// if (xhr.status === 200) {
// var reader = new FileReader;
// reader.addEventListener('loadend', function() {
// var image = new Image;
// image.src = reader.result;
// image.onload = function() {
// request.load(image);
// };
// });
// reader.readAsDataURL(xhr.response);
// } else {
// request.error(new Error('HTTP ' + xhr.status + ': ' + xhr.statusText));
// }
// };
// xhr.onerror = function() {
// request.error(new Image('Failed to load image'));
// };
// xhr.send();
if (callback) request.onLoad(callback.bind(self));
return request;
};
IO.loadScratchr2Project = function(id, callback, self) {
var request = new CompositeRequest;
IO.init(request);
request.defer = true;
request.add(IO.load(IO.PROJECT_URL + id + '/get/?' + Math.random().toString().slice(2)).onLoad(function(contents) {
try {
var json = JSON.parse(contents);
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
}));
return request;
};
IO.loadScratchr2ProjectTitle = function(id, callback, self) {
var request = new CompositeRequest;
request.defer = true;
request.add(P.IO.load('http://scratch.mit.edu/projects/' + id + '/').onLoad(function(data) {
var m = /<title>\s*(.+?)(\s+on\s+Scratch)?\s*<\/title>/.exec(data);
if (callback) request.onLoad(callback.bind(self));
if (m) {
var d = document.createElement('div');
d.innerHTML = m[1];
request.load(d.innerText);
} else {
request.error(new Error('No title'));
}
}));
return request;
};
IO.loadJSONProject = function(json, callback, self) {
var request = new CompositeRequest;
IO.init(request);
try {
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
return request;
};
IO.loadSB2Project = function(ab, callback, self) {
var request = new CompositeRequest;
IO.init(request);
try {
IO.zip = new JSZip(ab);
var json = JSON.parse(IO.zip.file('project.json').asText());
IO.images = 1; // ignore pen trails
IO.sounds = 0;
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
return request;
};
IO.loadSB2File = function(f, callback, self) {
var cr = new CompositeRequest;
cr.defer = true;
var request = new Request;
cr.add(request);
var reader = new FileReader;
reader.onloadend = function() {
cr.defer = true;
cr.add(IO.loadSB2Project(reader.result, function(result) {
cr.defer = false;
cr.getResult = function() {
return result;
};
cr.update();
}));
request.load();
};
reader.onprogress = function(e) {
request.progress(e.loaded, e.total, e.lengthComputable);
};
reader.readAsArrayBuffer(f);
if (callback) cr.onLoad(callback.bind(self));
return cr;
};
IO.loadProject = function(data) {
IO.loadArray(data.children, IO.loadObject);
IO.loadBase(data);
};
IO.loadBase = function(data) {
data.scripts = data.scripts || [];
data.costumes = IO.loadArray(data.costumes, IO.loadCostume);
data.sounds = IO.loadArray(data.sounds, IO.loadSound);
data.variables = data.variables || [];
data.lists = data.lists || [];
};
IO.loadArray = function(data, process) {
if (!data) return [];
for (var i = 0; i < data.length; i++) {
process(data[i]);
}
return data;
};
IO.loadObject = function(data) {
if (!data.cmd && !data.listName) {
IO.loadBase(data);
}
};
IO.loadCostume = function(data) {
IO.loadMD5(data.baseLayerMD5, function(asset) {
data.$image = asset;
});
if (data.textLayerMD5) {
IO.loadMD5(data.textLayerMD5, function(asset) {
data.$text = asset;
});
}
};
IO.loadSound = function() {
// TODO
};
IO.loadMD5 = function(md5, callback, zip, index) {
var ext = md5.split('.').pop();
if (ext === 'svg') {
var cb = function(source) {
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var image = new Image;
callback(image);
canvg(canvas, source, {
ignoreMouse: true,
ignoreAnimation: true,
ignoreClear: true,
renderCallback: function() {
image.src = canvas.toDataURL();
}
})
};
if (IO.zip) {
var image = IO.images;
IO.images += 1;
cb(IO.zip.file(image + '.svg').asText());
} else {
IO.projectRequest.add(IO.load(IO.ASSET_URL + md5 + '/get/', cb));
}
} else {
if (IO.zip) {
var image = IO.images;
IO.images += 1;
var request = new Request;
var f = IO.zip.file(image + '.' + ext).asBinary();
var img = new Image;
img.onload = function() {
if (callback) callback(img);
request.load();
};
img.src = 'data:image/' + (ext === 'jpg' ? 'jpeg' : ext) + ';base64,' + btoa(f);
IO.projectRequest.add(request);
} else {
IO.projectRequest.add(
IO.loadImage(IO.PROXY_URL + encodeURIComponent(IO.ASSET_URL + md5 + '/get/'), function(result) {
callback(result);
}));
}
}
};
var Base = function() {
this.isClone = false;
this.costumes = [];
this.currentCostumeIndex = 0;
this.objName = '';
this.sounds = [];
this.varRefs = {};
this.listRefs = {};
this.procedures = {};
this.listeners = {
whenClicked: [],
whenCloned: [],
whenGreenFlag: [],
whenIReceive: {},
whenKeyPressed: [],
whenSceneStarts: [],
whenSensorGreaterThan: []
};
for (var i = 0; i < 256; i++) {
this.listeners.whenKeyPressed.push([]);
}
this.fns = [];
this.scripts = [];
this.filters = {
color: 0,
fisheye: 0,
whirl: 0,
pixelate: 0,
mosaic: 0,
brightness: 0,
ghost: 0
};
this.initRuntime();
};
Base.prototype.fromJSON = function(data) {
this.objName = data.objName;
this.scripts = data.scripts;
this.currentCostumeIndex = data.currentCostumeIndex || 0;
this.costumes = data.costumes.map(function(d) {
return new Costume(d);
});
// this.sounds = data.sounds.map(function(d) {
// return new Sound(d);
// });
this.addLists(this.lists = data.lists);
this.addVariables(this.variables = data.variables);
return this;
};
Base.prototype.addVariables = function(variables) {
for (var i = 0; i < variables.length; i++) {
if (variables[i].isPeristent) {
throw new Error('Cloud variables are not supported');
}
this.varRefs[variables[i].name] = variables[i];
}
};
Base.prototype.addLists = function(lists) {
for (var i = 0; i < lists.length; i++) {
if (lists[i].isPeristent) {
throw new Error('Cloud lists are not supported');
}
this.listRefs[lists[i].listName] = lists[i];
// TODO list watchers
}
};
Base.prototype.showNextCostume = function() {
this.currentCostumeIndex = (this.currentCostumeIndex + 1) % this.costumes.length;
};
Base.prototype.showPreviousCostume = function() {
var length = this.costumes.length;
this.currentCostumeIndex = (this.currentCostumeIndex + length - 1) % length;
};
Base.prototype.getCostumeName = function() {
return this.costumes[this.currentCostumeIndex] ? this.costumes[this.currentCostumeIndex].objName : '';
};
Base.prototype.setCostume = function(costume) {
if (typeof costume !== 'number') {
costume = '' + costume;
for (var i = 0; i < this.costumes.length; i++) {
if (this.costumes[i].costumeName === costume) {
this.currentCostumeIndex = i;
return;
}
}
}
i = (Math.floor(Number(costume) || 0) - 1) % this.costumes.length;
if (i < 0) i += this.costumes.length;
this.currentCostumeIndex = i;
};
Base.prototype.setFilter = function(name, value) {
var min = 0;
var max = 100;
switch (name) {
case 'whirl':
case 'fisheye':
case 'pixelate': // absolute value
case 'mosaic': // absolute value
min = -Infinity;
max = Infinity;
break;
max = Infinity;
break;
case 'color':
value = value % 200;
if (value < 0) value += 200;
max = 200;
break;
}
if (value < min) value = min;
if (value > max) value = max;
this.filters[name] = value;
this.updateFilters();
};
Base.prototype.resetFilters = function() {
this.filters = {
color: 0,
fisheye: 0,
whirl: 0,
pixelate: 0,
mosaic: 0,
brightness: 0,
ghost: 0
};
};
Base.prototype.ask = function(question) {
var stage = this.stage;
if (question) {
if (this.isSprite && this.visible) {
stage.promptTitle.style.display = 'none';
} else {
stage.promptTitle.style.display = 'block';
stage.promptTitle.textContent = question;
}
} else {
stage.promptTitle.style.display = 'none';
}
stage.hidePrompt = false;
stage.prompter.style.display = 'block';
stage.prompt.value = '';
stage.prompt.focus();
};
var Stage = function() {
this.stage = this;
Stage.parent.call(this);
this.children = [];
this.defaultWatcherX = 10;
this.defaultWatcherY = 10;
this.info = {};
this.answer = '';
this.promptId = 0;
this.nextPromptId = 0;
this.tempoBPM = 60;
this.videoAlpha = 1;
this.zoom = 1;
this.maxZoom = 1;
this.baseNow = 0;
this.baseTime = 0;
this.timerStart = 0;
this.cloneCount = 0;
this.keys = {};
this.rawMouseX = 0;
this.rawMouseY = 0;
this.mouseX = 0;
this.mouseY = 0;
this.mousePressed = false;
this.penCanvas = document.createElement('canvas');
this.penCanvas.width = 480;
this.penCanvas.height = 360;
this.penContext = this.penCanvas.getContext('2d');
this.root = document.createElement('div');
this.root.style.position = 'absolute';
this.root.style.width = '480px';
this.root.style.height = '360px';
this.root.style.fontSize = '1px';
this.root.style.WebkitUserSelect =
this.root.style.MozUserSelect =
this.root.style.MSUserSelect =
this.root.style.WebkitUserSelect = 'none';
this.canvas = document.createElement('canvas');
this.root.appendChild(this.canvas);
this.canvas.width = 480;
this.canvas.height = 360;
this.context = this.canvas.getContext('2d');
this.canvas.tabIndex = 0;
this.canvas.style.outline = 'none';
this.canvas.style.position = 'absolute';
this.canvas.style.background = '#fff';
// hardware acceleration
this.canvas.style.WebkitTransform = 'translateZ(0)';
this.canvas.addEventListener('keydown', function(e) {
if (e.ctrlKey || e.altKey || e.metaKey) {
return;
}
this.keys[e.keyCode] = true;
this.trigger('whenKeyPressed', e.keyCode);
e.stopPropagation();
e.preventDefault();
}.bind(this));
this.canvas.addEventListener('keyup', function(e) {
this.keys[e.keyCode] = false;
e.stopPropagation();
e.preventDefault();
}.bind(this));
if (hasTouchEvents) {
document.addEventListener('touchstart', function(e) {
this.mousePressed = true;
for (var i = 0; i < e.changedTouches.length; i++) {
this.updateMouse(e.changedTouches[i]);
if (e.target === this.canvas) {
this.clickMouse();
}
}
if (e.target === this.canvas) e.preventDefault();
}.bind(this));
document.addEventListener('touchmove', function(e) {
this.updateMouse(e.changedTouches[0]);
}.bind(this));
document.addEventListener('touchend', function(e) {
this.releaseMouse();
}.bind(this));
} else {
document.addEventListener('mousedown', function(e) {
this.updateMouse(e);
this.mousePressed = true;
if (e.target === this.canvas) {
this.clickMouse();
e.preventDefault();
this.canvas.focus();
}
}.bind(this));
document.addEventListener('mousemove', function(e) {
this.updateMouse(e);
}.bind(this));
document.addEventListener('mouseup', function(e) {
this.updateMouse(e);
this.releaseMouse();
}.bind(this));
}
this.prompter = document.createElement('div');
this.root.appendChild(this.prompter);
this.prompter.style.position = 'absolute';
this.prompter.style.left =
this.prompter.style.right = '14em';
this.prompter.style.bottom = '6em';
this.prompter.style.padding = '5em 30em 5em 5em';
this.prompter.style.border = '3em solid rgb(46, 174, 223)';
this.prompter.style.borderRadius = '8em';
this.prompter.style.display = 'none';
this.promptTitle = document.createElement('div');
this.prompter.appendChild(this.promptTitle);
this.promptTitle.textContent = 'What\'s your name? aesfnaseu fihaosiefhoi uaesfhiouas ehfiha eofsh oiaesfoi seaof ho iaefshoi ufaeshiou afeshio aseof ';
this.promptTitle.style.cursor = 'default';
this.promptTitle.style.font = 'bold 13em sans-serif';
this.promptTitle.style.margin = '0 '+(-25/13)+'em '+(5/13)+'em 0';
this.promptTitle.style.whiteSpace = 'pre';
this.promptTitle.style.overflow = 'hidden';
this.promptTitle.style.textOverflow = 'ellipsis';
this.prompt = document.createElement('input');
this.prompter.appendChild(this.prompt);
this.prompt.style.border = '0';
this.prompt.style.background = '#eee';
this.prompt.style.MozBoxSizing =
this.prompt.style.boxSizing = 'border-box';
this.prompt.style.font = '13em sans-serif';
this.prompt.style.padding = '0 '+(3/13)+'em';
this.prompt.style.outline = '0';
this.prompt.style.margin = '0';
this.prompt.style.width = '100%';
this.prompt.style.height = ''+(20/13)+'em';
this.prompt.style.display = 'block';
this.prompt.style.WebkitBorderRadius =
this.prompt.style.borderRadius = '0';
this.prompt.style.WebkitBoxShadow =
this.prompt.style.boxShadow = 'inset '+(1/13)+'em '+(1/13)+'em '+(2/13)+'em rgba(0, 0, 0, .2), inset '+(-1/13)+'em '+(-1/13)+'em '+(1/13)+'em rgba(255, 255, 255, .2)';
this.prompt.style.WebkitAppearance = 'none';
this.promptButton = document.createElement('div');
this.prompter.appendChild(this.promptButton);
this.promptButton.style.width = '22em';
this.promptButton.style.height = '22em';
this.promptButton.style.position = 'absolute';
this.promptButton.style.right = '4em';
this.promptButton.style.bottom = '4em';
this.promptButton.style.background = 'url(icons.svg) -165em -37em';
this.promptButton.style.backgroundSize = '192em 64em';
this.prompt.addEventListener('keydown', function(e) {
if (e.keyCode === 13) {
this.submitPrompt();
}
}.bind(this));
this.promptButton.addEventListener(hasTouchEvents ? 'touchstart' : 'mousedown', this.submitPrompt.bind(this));
};
inherits(Stage, Base);
Stage.prototype.isStage = true;
Stage.prototype.fromJSON = function(data) {
Stage.parent.prototype.fromJSON.call(this, data);
data.children.forEach(function(d) {
if (d.listName) return;
this.children.push(new (d.cmd ? Watcher : Sprite)(this).fromJSON(d));
}, this);
this.children.forEach(function(child) {
if (child.resolve) child.resolve();
}, this);
P.compile(this);
return this;
};
Stage.prototype.focus = function() {
if (this.promptId < this.nextPromptId) {
this.prompt.focus();
} else {
this.canvas.focus();
}
};
Stage.prototype.updateMouse = function(e) {
var bb = this.canvas.getBoundingClientRect();
var x = (e.clientX - bb.left) / this.zoom - 240;
var y = 180 - (e.clientY - bb.top) / this.zoom;
this.rawMouseX = x;
this.rawMouseY = y;
if (x < -240) x = -240;
if (x > 240) x = 240;
if (y < -180) y = -180;
if (y > 180) y = 180;
this.mouseX = x;
this.mouseY = y;
};
Stage.prototype.setZoom = function(zoom) {
if (this.zoom === zoom) return;
if (this.maxZoom < zoom) {
this.maxZoom = zoom;
var canvas = this.penCanvas;
this.penCanvas = document.createElement('canvas');
this.penCanvas.width = 480 * zoom;
this.penCanvas.height = 360 * zoom;
this.penContext = this.penCanvas.getContext('2d');
this.penContext.drawImage(canvas, 0, 0, 480 * zoom, 360 * zoom);
this.penContext.scale(this.maxZoom, this.maxZoom);
}
this.root.style.width =
this.canvas.style.width = 480 * zoom + 'px';
this.root.style.height =
this.canvas.style.height = 360 * zoom + 'px';
this.root.style.fontSize = zoom + 'px';
this.zoom = zoom;
};
Stage.prototype.clickMouse = function() {
this.mouseSprite = undefined;
for (var i = this.children.length; i--;) {
if (this.children[i].isSprite && this.children[i].visible && this.children[i].touching('_mouse_')) {
if (this.children[i].isDraggable) {
this.mouseSprite = this.children[i];
this.children[i].mouseDown();
} else {
this.triggerFor(this.children[i], 'whenClicked');
}
return;
}
}
this.triggerFor(this, 'whenClicked');
};
Stage.prototype.releaseMouse = function() {
this.mousePressed = false;
if (this.mouseSprite) {
this.mouseSprite.mouseUp();
this.mouseSprite = undefined;
}
};
Stage.prototype.resetAllFilters = function() {
var children = this.children;
var i = children.length;
while (i--) {
children[i].resetFilters();
}
this.resetFilters();
};
Stage.prototype.removeAllClones = function() {
var i = this.children.length;
while (i--) {
if (this.children[i].isClone) {
this.children.splice(i, 1);
}
}
this.cloneCount = 0;
};
Stage.prototype.getObject = function(name) {
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].objName === name) {
return this.children[i];
}
}
if (name === '_stage_' || name === this.objName) {
return this;
}
};
Stage.prototype.draw = function() {
var context = this.context;
this.canvas.width = 480 * this.zoom; // clear
this.canvas.height = 360 * this.zoom;
context.save();
context.scale(this.zoom, this.zoom);
this.drawOn(context);
context.restore();
if (this.hidePrompt) {
this.hidePrompt = false;
this.prompter.style.display = 'none';
this.canvas.focus();
}
};
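// Draw the backdrop, the pen layer and then every visible child onto the
// given context. "except" lets touchingColor exclude the querying sprite.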
Stage.prototype.drawOn = function(context, except) {
var costume = this.costumes[this.currentCostumeIndex];
context.save();
context.scale(costume.scale, costume.scale);
context.globalAlpha = Math.max(0, Math.min(1, 1 - this.filters.ghost / 100));
context.drawImage(costume.image, 0, 0);
context.restore();
context.save();
context.scale(1 / this.maxZoom, 1 / this.maxZoom);
context.drawImage(this.penCanvas, 0, 0);
context.restore();
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].visible && this.children[i] !== except) {
this.children[i].draw(context);
}
}
};
Stage.prototype.moveTo = function() {};
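// Commit the current "ask" prompt: store the typed value as the answer and
// hide the prompt once no further asks are pending.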
Stage.prototype.submitPrompt = function() {
if (this.promptId < this.nextPromptId) {
this.answer = this.prompt.value;
this.promptId += 1;
if (this.promptId >= this.nextPromptId) {
this.hidePrompt = true;
}
}
};
var KEY_CODES = {
'space': 32,
'left arrow': 37,
'up arrow': 38,
'right arrow': 39,
'down arrow': 40
};
var getKeyCode = function(keyName) {
return KEY_CODES[keyName.toLowerCase()] || keyName.toUpperCase().charCodeAt(0);
};
var Sprite = function(stage) {
this.stage = stage;
Sprite.parent.call(this);
this.addVariables(stage.variables);
this.addLists(stage.lists);
this.direction = 90;
this.indexInLibrary = -1;
this.isDraggable = false;
this.isDragging = false;
this.rotationStyle = 'normal';
this.scale = 1;
this.scratchX = 0;
this.scratchY = 0;
this.spriteInfo = {};
this.visible = true;
this.penHue = 240;
this.penSaturation = 100;
this.penLightness = 50;
this.penSize = 1;
this.isPenDown = false;
this.isSprite = true;
};
inherits(Sprite, Base);
Sprite.prototype.fromJSON = function(data) {
Sprite.parent.prototype.fromJSON.call(this, data);
this.direction = data.direction;
this.indexInLibrary = data.indexInLibrary;
this.isDraggable = data.isDraggable;
this.rotationStyle = data.rotationStyle;
this.scale = data.scale;
this.scratchX = data.scratchX;
this.scratchY = data.scratchY;
this.spriteInfo = data.spriteInfo;
this.visible = data.visible;
return this;
};
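// Create a runtime clone. Costumes, sounds, compiled functions and listeners
// are shared by reference; variables, lists, filters and motion/pen state are
// copied so the clone can diverge from the original.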
Sprite.prototype.clone = function() {
var c = new Sprite(this.stage);
c.isClone = true;
c.costumes = this.costumes;
c.currentCostumeIndex = this.currentCostumeIndex;
c.objName = this.objName;
c.sounds = this.sounds;
c.variables = [];
c.lists = [];
for (var i = 0; i < this.variables.length; i++) {
var v = this.variables[i];
c.varRefs[v.name] = c.variables[i] = {
name: v.name,
value: v.value
};
}
for (var i = 0; i < this.lists.length; i++) {
var l = this.lists[i];
c.listRefs[l.listName] = c.lists[i] = {
contents: l.contents,
listName: l.listName
};
}
c.procedures = this.procedures;
c.listeners = this.listeners;
c.fns = this.fns;
c.scripts = this.scripts;
c.filters = {
color: this.filters.color,
fisheye: this.filters.fisheye,
whirl: this.filters.whirl,
pixelate: this.filters.pixelate,
mosaic: this.filters.mosaic,
brightness: this.filters.brightness,
ghost: this.filters.ghost
};
c.direction = this.direction;
c.indexInLibrary = this.indexInLibrary;
c.isDraggable = this.isDraggable;
c.rotationStyle = this.rotationStyle;
c.scale = this.scale;
c.scratchX = this.scratchX;
c.scratchY = this.scratchY;
c.visible = this.visible;
c.penHue = this.penHue;
c.penSaturation = this.penSaturation;
c.penLightness = this.penLightness;
c.penSize = this.penSize;
c.isPenDown = this.isPenDown;
c.initRuntime();
return c;
};
Sprite.prototype.mouseDown = function() {
this.dragStartX = this.scratchX;
this.dragStartY = this.scratchY;
this.dragOffsetX = this.scratchX - this.stage.mouseX;
this.dragOffsetY = this.scratchY - this.stage.mouseY;
this.isDragging = true;
};
Sprite.prototype.mouseUp = function() {
if (this.isDragging && this.scratchX === this.dragStartX && this.scratchY === this.dragStartY) {
this.stage.triggerFor(this, 'whenClicked');
}
this.isDragging = false;
};
Sprite.prototype.forward = function(steps) {
var d = (90 - this.direction) * Math.PI / 180;
this.moveTo(this.scratchX + steps * Math.cos(d), this.scratchY + steps * Math.sin(d));
};
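// Move the sprite and, if the pen is down, draw a line segment from the old
// position. Roughly odd pen sizes are offset by half a pixel so thin lines
// stay crisp.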
Sprite.prototype.moveTo = function(x, y) {
var ox = this.scratchX;
var oy = this.scratchY;
if (ox === x && oy === y && !this.isPenDown) return;
this.scratchX = x;
this.scratchY = y;
this.keepOnStage();
if (this.isPenDown) {
var context = this.stage.penContext;
if (this.penSize % 2 > .5 && this.penSize % 2 < 1.5) {
ox -= .5;
oy -= .5;
x -= .5;
y -= .5;
}
context.strokeStyle = 'hsl(' + this.penHue + ',' + this.penSaturation + '%,' + (this.penLightness > 100 ? 200 - this.penLightness : this.penLightness) + '%)';
context.lineWidth = this.penSize;
context.lineCap = 'round';
context.beginPath();
context.moveTo(240 + ox, 180 - oy);
context.lineTo(240 + x, 180 - y);
context.stroke();
}
};
Sprite.prototype.dotPen = function() {
var context = this.stage.penContext;
var x = this.scratchX;
var y = this.scratchY;
if (this.penSize % 2 > .5 && this.penSize % 2 < 1.5) {
x -= .5;
y -= .5;
}
context.strokeStyle = 'hsl(' + this.penHue + ',' + this.penSaturation + '%,' + (this.penLightness > 100 ? 200 - this.penLightness : this.penLightness) + '%)';
context.lineWidth = this.penSize;
context.lineCap = 'round';
context.beginPath();
context.moveTo(240 + x, 180 - y);
context.lineTo(240.01 + x, 180 - y);
context.stroke();
};
Sprite.prototype.stamp = function() {
var context = this.stage.penContext;
this.draw(context);
};
Sprite.prototype.draw = function(context) {
var costume = this.costumes[this.currentCostumeIndex];
if (this.isDragging) {
this.moveTo(this.dragOffsetX + this.stage.mouseX, this.dragOffsetY + this.stage.mouseY);
}
if (costume) {
context.save();
context.translate(this.scratchX + 240, 180 - this.scratchY);
if (this.rotationStyle === 'normal') {
context.rotate((this.direction - 90) * Math.PI / 180);
} else if (this.rotationStyle === 'leftRight' && this.direction < 0) {
context.scale(-1, 1);
}
context.scale(this.scale, this.scale);
context.scale(costume.scale, costume.scale);
context.translate(-costume.rotationCenterX, -costume.rotationCenterY);
context.globalAlpha = Math.max(0, Math.min(1, 1 - this.filters.ghost / 100));
context.drawImage(costume.image, 0, 0);
context.restore();
}
};
Sprite.prototype.keepOnStage = function() {
// TODO
};
Sprite.prototype.setDirection = function(degrees) {
var d = degrees % 360;
if (d > 180) d -= 360;
if (d <= -180) d += 360;
this.direction = d;
};
var collisionCanvas = document.createElement('canvas');
var collisionContext = collisionCanvas.getContext('2d');
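// Pixel-accurate collision testing. Sprite-vs-sprite checks intersect the two
// bounding boxes, then composite both sprites into the shared off-screen
// canvas with 'source-in' and scan the overlap for any opaque pixel. The
// '_mouse_' case samples the costume bitmap directly, so it does not account
// for rotation or scaling.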
Sprite.prototype.touching = function(thing) {
var costume = this.costumes[this.currentCostumeIndex];
if (thing === '_mouse_') {
var bounds = this.rotatedBounds();
var x = this.stage.rawMouseX;
var y = this.stage.rawMouseY;
if (x < bounds.left || y < bounds.bottom || x > bounds.right || y > bounds.top) {
return false;
}
var d = costume.context.getImageData((x - this.scratchX) * costume.bitmapResolution + costume.rotationCenterX, (this.scratchY - y) * costume.bitmapResolution + costume.rotationCenterY, 1, 1).data;
return d[3] !== 0;
} else if (thing === '_edge_') {
var bounds = this.rotatedBounds();
return bounds.left <= -240 || bounds.right >= 240 || bounds.top >= 180 || bounds.bottom <= -180;
} else {
if (!this.visible) return false;
var sprite = this.stage.getObject(thing);
if (!sprite || !sprite.visible) return false;
var sc = sprite.costumes[sprite.currentCostumeIndex];
var mb = this.rotatedBounds();
var ob = sprite.rotatedBounds();
if (mb.bottom >= ob.top || ob.bottom >= mb.top || mb.left >= ob.right || ob.left >= mb.right) {
return false;
}
var left = Math.max(mb.left, ob.left);
var top = Math.min(mb.top, ob.top);
var right = Math.min(mb.right, ob.right);
var bottom = Math.max(mb.bottom, ob.bottom);
collisionCanvas.width = right - left;
collisionCanvas.height = top - bottom;
collisionContext.save();
collisionContext.translate(-(left + 240), -(180 - top));
this.draw(collisionContext);
collisionContext.globalCompositeOperation = 'source-in';
sprite.draw(collisionContext);
collisionContext.restore();
var data = collisionContext.getImageData(0, 0, right - left, top - bottom).data;
var length = (right - left) * (top - bottom) * 4;
for (var i = 0; i < length; i += 4) {
if (data[i + 3]) {
return true;
}
}
return false;
}
};
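// "Touching color?": draw everything except this sprite, mask it with this
// sprite using 'destination-in', and scan the result for the requested colour.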
Sprite.prototype.touchingColor = function(rgb) {
var b = this.rotatedBounds();
collisionCanvas.width = b.right - b.left;
collisionCanvas.height = b.top - b.bottom;
collisionContext.save();
collisionContext.translate(-(240 + b.left), -(180 - b.top));
this.stage.drawOn(collisionContext, this);
collisionContext.globalCompositeOperation = 'destination-in';
this.draw(collisionContext);
collisionContext.restore();
var data = collisionContext.getImageData(0, 0, b.right - b.left, b.top - b.bottom).data;
rgb = rgb & 0xffffff;
var length = (b.right - b.left) * (b.top - b.bottom) * 4;
for (var i = 0; i < length; i += 4) {
if ((data[i] << 16 | data[i + 1] << 8 | data[i + 2]) === rgb) {
return true;
}
}
return false;
};
Sprite.prototype.bounceOffEdge = function() {
var b = this.rotatedBounds();
var dl = 240 + b.left;
var dt = 180 - b.top;
var dr = 240 - b.right;
var db = 180 + b.bottom;
var d = Math.min(dl, dt, dr, db);
if (d > 0) return;
var dir = this.direction * Math.PI / 180;
var dx = Math.sin(dir);
var dy = -Math.cos(dir);
switch (d) {
case dl: dx = Math.max(0.2, Math.abs(dx)); break;
case dt: dy = Math.max(0.2, Math.abs(dy)); break;
case dr: dx = -Math.max(0.2, Math.abs(dx)); break;
case db: dy = -Math.max(0.2, Math.abs(dy)); break;
}
this.direction = Math.atan2(dy, dx) * 180 / Math.PI + 90;
b = this.rotatedBounds();
var x = this.scratchX;
var y = this.scratchY;
if (b.left < -240) x += -240 - b.left;
if (b.top > 180) y += 180 - b.top;
if (b.right > 240) x += 240 - b.right;
if (b.bottom < -180) y += -180 - b.bottom;
this.moveTo(x, y);
};
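// Axis-aligned bounding box of the rotated, scaled costume in stage
// coordinates, computed from the four transformed corners.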
Sprite.prototype.rotatedBounds = function() {
var costume = this.costumes[this.currentCostumeIndex];
var mSin = Math.sin(this.direction * Math.PI / 180);
var mCos = Math.cos(this.direction * Math.PI / 180);
var left = -costume.rotationCenterX * costume.scale * this.scale;
var top = costume.rotationCenterY * costume.scale * this.scale;
var right = left + costume.image.width * costume.scale * this.scale;
var bottom = top - costume.image.height * costume.scale * this.scale;
var tlX = mSin * left - mCos * top;
var tlY = mCos * left + mSin * top;
var trX = mSin * right - mCos * top;
var trY = mCos * right + mSin * top;
var blX = mSin * left - mCos * bottom;
var blY = mCos * left + mSin * bottom;
var brX = mSin * right - mCos * bottom;
var brY = mCos * right + mSin * bottom;
return {
left: this.scratchX + Math.min(tlX, trX, blX, brX),
right: this.scratchX + Math.max(tlX, trX, blX, brX),
top: this.scratchY + Math.max(tlY, trY, blY, brY),
bottom: this.scratchY + Math.min(tlY, trY, blY, brY)
};
};
Sprite.prototype.showRotatedBounds = function() {
var bounds = this.rotatedBounds();
var div = document.createElement('div');
div.style.outline = '1px solid red';
div.style.position = 'absolute';
div.style.left = (240 + bounds.left) + 'px';
div.style.top = (180 - bounds.top) + 'px';
div.style.width = (bounds.right - bounds.left) + 'px';
div.style.height = (bounds.top - bounds.bottom) + 'px';
this.stage.canvas.parentNode.appendChild(div);
};
Sprite.prototype.distanceTo = function(thing) {
if (thing === '_mouse_') {
var x = this.stage.mouseX;
var y = this.stage.mouseY;
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
x = sprite.scratchX;
y = sprite.scratchY;
}
return Math.sqrt((this.scratchX - x) * (this.scratchX - x) + (this.scratchY - y) * (this.scratchY - y));
};
Sprite.prototype.gotoObject = function(thing) {
if (thing === '_mouse_') {
this.moveTo(this.stage.mouseX, this.stage.mouseY);
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
this.moveTo(sprite.scratchX, sprite.scratchY);
}
};
Sprite.prototype.pointTowards = function(thing) {
if (thing === '_mouse_') {
var x = this.stage.mouseX;
var y = this.stage.mouseY;
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
x = sprite.scratchX;
y = sprite.scratchY;
}
this.direction = Math.atan2(x - this.scratchX, y - this.scratchY) * 180 / Math.PI;
};
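// Wraps a costume: the base layer and optional text layer are composited onto
// a canvas, re-rendering once the images finish loading. "scale" compensates
// for bitmapResolution (typically 2 for bitmap costumes).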
var Costume = function(data) {
this.baseLayerID = data.baseLayerID;
this.baseLayerMD5 = data.baseLayerMD5;
this.baseLayer = data.$image;
this.bitmapResolution = data.bitmapResolution || 1;
this.scale = 1 / this.bitmapResolution;
this.costumeName = data.costumeName;
this.rotationCenterX = data.rotationCenterX;
this.rotationCenterY = data.rotationCenterY;
this.textLayer = data.$text;
this.image = document.createElement('canvas');
this.context = this.image.getContext('2d');
this.render();
this.baseLayer.onload = function() {
this.render();
}.bind(this);
if (this.textLayer) {
this.textLayer.onload = this.baseLayer.onload;
}
};
addEvents(Costume, 'load');
Costume.prototype.render = function() {
if (!this.baseLayer.width || this.textLayer && !this.textLayer.width) {
return;
}
this.image.width = this.baseLayer.width;
this.image.height = this.baseLayer.height;
this.context.drawImage(this.baseLayer, 0, 0);
if (this.textLayer) {
this.context.drawImage(this.textLayer, 0, 0);
}
};
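// A stage monitor. mode 1 is the normal label + value readout, mode 2 the
// large readout, and mode 3 adds a slider that can be dragged to set the
// variable.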
var Watcher = function(stage) {
this.stage = stage;
this.cmd = 'getVar:';
this.color = '#ee7d16';
this.isDiscrete = true;
this.label = 'watcher';
this.mode = 1;
this.param = 'var';
this.sliderMax = 100;
this.sliderMin = 0;
this.target = undefined;
this.visible = true;
this.x = 0;
this.y = 0;
};
Watcher.prototype.fromJSON = function(data) {
this.cmd = data.cmd || 'getVar:';
if (data.color) {
var c = (data.color < 0 ? data.color + 0x1000000 : data.color).toString(16);
this.color = '#000000'.slice(0, -c.length) + c;
}
this.isDiscrete = data.isDiscrete == null ? true : data.isDiscrete;
this.label = data.label || '';
this.mode = data.mode || 1;
this.param = data.param;
this.sliderMax = data.sliderMax == null ? 100 : data.sliderMax;
this.sliderMin = data.sliderMin || 0;
this.targetName = data.target;
this.visible = data.visible == null ? true : data.visible;
this.x = data.x || 0;
this.y = data.y || 0;
return this;
};
Watcher.prototype.resolve = function() {
this.target = this.stage.getObject(this.targetName);
if (this.target && this.cmd === 'getVar:') {
var ref = this.target.varRefs[this.param];
if (ref) {
ref.watcher = this;
}
}
};
Watcher.prototype.draw = function(context) {
var value = 0;
if (!this.target) return;
switch (this.cmd) {
case 'answer':
value = this.stage.answer;
break;
case 'backgroundIndex':
value = this.stage.currentCostumeIndex + 1;
break;
case 'costumeIndex':
value = this.target.currentCostumeIndex + 1;
break;
case 'getVar:':
var ref = this.target.varRefs[this.param];
if (ref) {
if (this.mode === 3 && this.stage.mousePressed) {
var x = this.stage.mouseX + 240 - this.x - 5;
var y = 180 - this.stage.mouseY - this.y - 20;
if (x >= 0 && y >= 0 && x <= this.width - 5 - 5 && y <= 9) {
ref.value = this.sliderMin + Math.max(0, Math.min(1, (x - 2.5) / (this.width - 5 - 5 - 5))) * (this.sliderMax - this.sliderMin);
ref.value = this.isDiscrete ? Math.round(ref.value) : Math.round(ref.value * 100) / 100;
}
}
value = ref.value;
}
break;
case 'heading':
value = this.target.direction;
break;
case 'scale':
value = this.target.scale * 100;
break;
case 'sceneName':
ref = this.stage.costumes[this.stage.currentCostumeIndex];
if (ref) value = ref.costumeName;
break;
case 'senseVideoMotion':
// TODO
break;
case 'soundLevel':
// TODO
break;
case 'tempo':
value = this.stage.tempoBPM;
break;
case 'timeAndDate':
value = this.timeAndDate(this.param);
break;
case 'timer':
value = Math.round((this.stage.now() - this.stage.timerStart) / 100) / 10;
break;
case 'volume':
// TODO
break;
case 'xpos':
value = this.target.scratchX;
break;
case 'ypos':
value = this.target.scratchY;
break;
}
if (typeof value === 'number' && (value < 0.001 || value > 0.001)) {
value = Math.round(value * 1000) / 1000;
}
value = String(value);
if (this.labelWidth == null) {
context.font = 'bold 11px sans-serif';
this.labelWidth = context.measureText(this.label).width;
}
context.save();
context.translate(this.x, this.y);
if (this.mode === 1 || this.mode === 3) {
context.font = 'bold 11px sans-serif';
var dw = Math.max(41, 5 + context.measureText(value).width + 5);
var r = 5;
var w = this.width = 5 + this.labelWidth + 5 + dw + 5;
var h = this.mode === 1 ? 21 : 32;
context.strokeStyle = 'rgb(148, 145, 145)';
context.fillStyle = 'rgb(193, 196, 199)';
context.lineWidth = 2;
context.beginPath();
context.arc(r + 1, r + 1, r, Math.PI, Math.PI * 3/2, false);
context.arc(w - r - 1, r + 1, r, Math.PI * 3/2, 0, false);
context.arc(w - r - 1, h - r - 1, r, 0, Math.PI/2, false);
context.arc(r + 1, h - r - 1, r, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#000';
context.fillText(this.label, 5, 14);
var dh = 15;
var dx = 5 + this.labelWidth + 5;
var dy = 3;
var dr = 4;
context.save();
context.translate(dx, dy);
context.strokeStyle = '#fff';
context.fillStyle = this.color;
context.lineWidth = 2;
context.beginPath();
context.arc(dr + 1, dr + 1, dr, Math.PI, Math.PI * 3/2, false);
context.arc(dw - dr - 1, dr + 1, dr, Math.PI * 3/2, 0, false);
context.arc(dw - dr - 1, dh - dr - 1, dr, 0, Math.PI/2, false);
context.arc(dr + 1, dh - dr - 1, dr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#fff';
context.textAlign = 'center';
context.fillText(value, dw / 2, dh - 4);
context.restore();
if (this.mode === 3) {
var sh = 5;
var sw = w - 5 - 5;
var sr = 1.5;
var br = 4.5;
context.save();
context.translate(5, 22);
context.strokeStyle = 'rgb(148, 145, 145)';
context.fillStyle = 'rgb(213, 216, 219)';
context.lineWidth = 2;
context.beginPath();
context.arc(sr + 1, sr + 1, sr, Math.PI, Math.PI * 3/2, false);
context.arc(sw - sr - 1, sr + 1, sr, Math.PI * 3/2, 0, false);
context.arc(sw - sr - 1, sh - sr - 1, sr, 0, Math.PI/2, false);
context.arc(sr + 1, sh - sr - 1, sr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
var x = (sw - sh) * ((Number(value) || 0) - this.sliderMin) / (this.sliderMax - this.sliderMin);
context.strokeStyle = 'rgb(108, 105, 105)';
context.fillStyle = 'rgb(233, 236, 239)';
context.beginPath();
context.arc(x + sh / 2, sh / 2, br - 1, 0, Math.PI * 2, false);
context.stroke();
context.fill();
context.restore();
}
} else if (this.mode === 2) {
context.font = 'bold 15px sans-serif';
dh = 21;
dw = Math.max(41, 5 + context.measureText(value).width + 5);
dr = 4;
context.strokeStyle = '#fff';
context.fillStyle = this.color;
context.lineWidth = 2;
context.beginPath();
context.arc(dr + 1, dr + 1, dr, Math.PI, Math.PI * 3/2, false);
context.arc(dw - dr - 1, dr + 1, dr, Math.PI * 3/2, 0, false);
context.arc(dw - dr - 1, dh - dr - 1, dr, 0, Math.PI/2, false);
context.arc(dr + 1, dh - dr - 1, dr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#fff';
context.textAlign = 'center';
context.fillText(value, dw / 2, dh - 5);
}
context.restore();
};
return {
hasTouchEvents: hasTouchEvents,
getKeyCode: getKeyCode,
IO: IO,
Base: Base,
Stage: Stage,
Sprite: Sprite,
Watcher: Watcher
};
}());
P.compile = (function() {
'use strict';
var LOG_PRIMITIVES;
// LOG_PRIMITIVES = true;
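// Each Scratch script is compiled to JavaScript source accumulated in
// "source". Blocking constructs (waits, loops, broadcasts, asks) call label()
// to record a re-entry offset and queue()/return to yield; every offset later
// becomes its own continuation function, which is how scripts appear to run
// concurrently at the stage framerate.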
var EVENT_SELECTORS = [
'procDef',
'whenClicked',
'whenCloned',
'whenGreenFlag',
'whenIReceive',
'whenKeyPressed',
'whenSceneStarts',
'whenSensorGreaterThan' // TODO
];
var compileScripts = function(object) {
for (var i = 0; i < object.scripts.length; i++) {
compileListener(object, object.scripts[i][2]);
}
};
var warnings;
var warn = function(message) {
warnings[message] = (warnings[message] || 0) + 1;
};
var name = 'a';
function varn() {
var i, s;
s = '';
i = name.length - 1;
while (i >= 0 && name[i] === 'z') {
s = 'a' + s;
--i;
}
if (i === -1) {
s = 'a' + s;
} else {
s = String.fromCharCode(name.charCodeAt(i) + 1) + s;
}
s = name.substr(0, i) + s;
name = s;
return '$tmp_' + s;
}
var compileListener = function(object, script) {
if (!script[0] || EVENT_SELECTORS.indexOf(script[0][0]) === -1) return;
var nextLabel = function() {
return object.fns.length + fns.length;
};
var label = function() {
var id = nextLabel();
fns.push(source.length);
return id;
};
var delay = function() {
source += 'return;\n';
label();
};
var queue = function(id) {
source += 'queue(' + id + ');\n';
source += 'return;\n';
};
var seq = function(script) {
if (!script) return;
for (var i = 0; i < script.length; i++) {
compile(script[i]);
}
};
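// Compile a reporter block or literal into a JavaScript expression string.
// Numbers and booleans are embedded directly; strings are escaped and quoted.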
var val = function(e) {
if (typeof e === 'number' || typeof e === 'boolean') {
return '' + e;
} else if (typeof e === 'string') {
return '"' + e
.replace(/\\/g, '\\\\')
.replace(/\n/g, '\\n')
.replace(/\r/g, '\\r')
.replace(/"/g, '\\"') + '"';
} else if (e[0] === 'xpos') { /* Motion */
return 'S.scratchX';
} else if (e[0] === 'ypos') {
return 'S.scratchY';
} else if (e[0] === 'heading') {
return 'S.direction';
} else if (e[0] === 'costumeIndex') { /* Looks */
return '(S.currentCostumeIndex + 1)';
} else if (e[0] === 'costumeName') {
return 'S.getCostumeName()';
} else if (e[0] === 'backgroundIndex') {
return '(self.currentCostumeIndex + 1)';
} else if (e[0] === 'sceneName') {
return 'self.getCostumeName()';
} else if (e[0] === 'scale') {
return 'S.scale';
// } else if (e[0] === 'volume') { /* Sound */
} else if (e[0] === 'tempo') {
return 'self.tempoBPM';
} else if (e[0] === 'getParam') { /* Data */
return '(C && C.args[' + val(e[1]) + '] != null ? C.args[' + val(e[1]) + '] : 0)';
} else if (e[0] === 'readVariable') {
return 'S.varRefs[' + val(e[1]) + '].value';
} else if (e[0] === 'contentsOfList:') {
return 'contentsOfList(' + val(e[1]) + ')';
} else if (e[0] === 'getLine:ofList:') {
return 'getLineOfList(' + val(e[2]) + ', ' + val(e[1]) + ')';
} else if (e[0] === 'lineCountOfList:') {
return 'lineCountOfList(' + val(e[1]) + ')';
} else if (e[0] === 'list:contains:') {
return 'listContains(' + val(e[1]) + ', ' + val(e[2]) + ')';
} else if (e[0] === '+') { /* Operators */
return '(' + num(e[1]) + ' + ' + num(e[2]) + ')';
} else if (e[0] === '-') {
return '(' + num(e[1]) + ' - ' + num(e[2]) + ')';
} else if (e[0] === '*') {
return '(' + num(e[1]) + ' * ' + num(e[2]) + ')';
} else if (e[0] === '/') {
return '(' + num(e[1]) + ' / ' + num(e[2]) + ')';
} else if (e[0] === 'randomFrom:to:') {
return 'random(' + num(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === '<') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === -1)';
} else if (e[0] === '=') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === 0)';
} else if (e[0] === '>') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === 1)';
} else if (e[0] === '&') {
return '(' + bool(e[1]) + ' && ' + bool(e[2]) + ')';
} else if (e[0] === '|') {
return '(' + bool(e[1]) + ' || ' + bool(e[2]) + ')';
} else if (e[0] === 'not') {
return '!' + bool(e[1]) + '';
} else if (e[0] === 'abs') {
return 'Math.abs(' + num(e[1]) + ')';
} else if (e[0] === 'sqrt') {
return 'Math.sqrt(' + num(e[1]) + ')';
} else if (e[0] === 'concatenate:with:') {
return '("" + ' + val(e[1]) + ' + ' + val(e[2]) + ')';
} else if (e[0] === 'letter:of:') {
return '(("" + ' + val(e[2]) + ')[Math.floor(' + num(e[1]) + ')] || "")';
} else if (e[0] === 'stringLength:') {
return '("" + ' + val(e[1]) + ').length';
} else if (e[0] === '%' || e[0] === '\\') {
return 'mod(' + num(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === 'rounded') {
return 'Math.round(' + num(e[1]) + ')';
} else if (e[0] === 'computeFunction:of:') {
return 'mathFunc(' + val(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === 'mousePressed') {
return 'self.mousePressed';
} else if (e[0] === 'mouseX') {
return 'self.mouseX';
} else if (e[0] === 'mouseY') {
return 'self.mouseY';
} else if (e[0] === 'touching:') { /* Sensing */
return 'S.touching(' + val(e[1]) + ')';
} else if (e[0] === 'touchingColor:') {
return 'S.touchingColor(' + val(e[1]) + ')';
// } else if (e[0] === 'color:sees:') {
} else if (e[0] === 'answer') {
return 'self.answer';
} else if (e[0] === 'timer') {
return '(self.now() - self.timerStart) / 1000';
} else if (e[0] === 'keyPressed:') {
return '!!self.keys[P.getKeyCode(' + val(e[1]) + ')]';
} else if (e[0] === 'distanceTo:') {
return 'S.distanceTo(' + val(e[1]) + ')';
} else if (e[0] === 'getAttribute:of:') {
return 'attribute(' + val(e[1]) + ', ' + val(e[2]) + ')';
// } else if (e[0] === 'getUserId') {
// } else if (e[0] === 'getUserName') {
// } else if (e[0] === 'soundLevel') {
// } else if (e[0] === 'isLoud') {
} else if (e[0] === 'timestamp') {
return '((Date.now() - epoch) / 86400000)';
} else if (e[0] === 'timeAndDate') {
return 'timeAndDate(' + val(e[1]) + ')';
// } else if (e[0] === 'sensor:') {
// } else if (e[0] === 'sensorPressed:') {
} else {
warn('Undefined val: ' + e[0]);
}
};
var bool = function(e) {
return 'bool(' + val(e) + ')';
};
var num = function(e) {
if (typeof e === 'number') {
return e;
}
if (typeof e === 'boolean' || typeof e === 'string') {
return Number(e) || 0;
}
return '(Number(' + val(e) + ') || 0)';
};
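// Compile a single command block, appending statements to "source". Control
// blocks split the script into continuations via label()/queue() unless the
// code is running at warp speed, in which case plain loops are emitted.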
var compile = function(block) {
if (LOG_PRIMITIVES) {
source += 'console.log(' + val(block[0]) + ');\n';
}
if (block[0] === 'forward:') { /* Motion */
source += 'S.forward(' + num(block[1]) + ');\n';
} else if (block[0] === 'turnRight:') {
source += 'S.setDirection(S.direction + ' + num(block[1]) + ');\n';
} else if (block[0] === 'turnLeft:') {
source += 'S.setDirection(S.direction - ' + num(block[1]) + ');\n';
} else if (block[0] === 'heading:') {
source += 'S.setDirection(' + num(block[1]) + ');\n';
} else if (block[0] === 'pointTowards:') {
source += 'S.pointTowards(' + val(block[1]) + ');\n';
} else if (block[0] === 'gotoX:y:') {
source += 'S.moveTo(' + num(block[1]) + ', ' + num(block[2]) + ');\n';
} else if (block[0] === 'gotoSpriteOrMouse:') {
source += 'S.gotoObject(' + val(block[1]) + ');\n';
} else if (block[0] === 'changeXposBy:') {
source += 'S.moveTo(S.scratchX + ' + num(block[1]) + ', S.scratchY);\n';
} else if (block[0] === 'xpos:') {
source += 'S.moveTo(' + num(block[1]) + ', S.scratchY);\n';
} else if (block[0] === 'changeYposBy:') {
source += 'S.moveTo(S.scratchX, S.scratchY + ' + num(block[1]) + ');\n';
} else if (block[0] === 'ypos:') {
source += 'S.moveTo(S.scratchX, ' + num(block[1]) + ');\n';
} else if (block[0] === 'bounceOffEdge') {
source += 'S.bounceOffEdge();\n';
} else if (block[0] === 'setRotationStyle') {
source += 'var style = ' + val(block[1]) + ';\n';
source += 'S.rotationStyle = style === "left-right" ? "leftRight" : style === "don\'t rotate" ? "none" : "normal";';
} else if (block[0] === 'lookLike:') { /* Looks */
source += 'S.setCostume(' + val(block[1]) + ');\n';
} else if (block[0] === 'nextCostume') {
source += 'S.currentCostumeIndex = (S.currentCostumeIndex + 1) % S.costumes.length;\n';
} else if (block[0] === 'showBackground:' ||
block[0] === 'startScene') {
source += 'self.setCostume(' + val(block[1]) + ');\n';
source += 'sceneChange();\n';
} else if (block[0] === 'nextBackground' ||
block[0] === 'nextScene') {
source += 'S.currentCostumeIndex = (S.currentCostumeIndex + 1) % S.costumes.length;\n';
source += 'sceneChange();\n';
} else if (block[0] === 'startSceneAndWait') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
source += 'self.setCostume(' + val(block[1]) + ');\n';
source += 'R.threads = sceneChange();\n';
var id = label();
source += 'if (!running(R.threads)) {\n';
queue(id);
source += '}\n';
}
// } else if (block[0] === 'say:duration:elapsed:from:') {
} else if (block[0] === 'say:') {
source += 'console.log(' + val(block[1]) + ');\n';
// } else if (block[0] === 'think:duration:elapsed:from:') {
// } else if (block[0] === 'think:') {
} else if (block[0] === 'changeGraphicEffect:by:') {
source += 'S.filters[' + val(block[1]) + '] += ' + num(block[2]) + ';\n';
} else if (block[0] === 'setGraphicEffect:to:') {
source += 'S.filters[' + val(block[1]) + '] = ' + num(block[2]) + ';\n';
} else if (block[0] === 'filterReset') {
source += 'S.resetFilters();\n';
} else if (block[0] === 'changeSizeBy:') {
source += 'S.scale += ' + num(block[1]) + ' / 100;\n';
} else if (block[0] === 'setSizeTo:') {
source += 'S.scale = ' + num(block[1]) + ' / 100;\n';
} else if (block[0] === 'show') {
source += 'S.visible = true;\n';
} else if (block[0] === 'hide') {
source += 'S.visible = false;\n';
} else if (block[0] === 'comeToFront') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) self.children.splice(i, 1);\n';
source += 'self.children.push(S);\n';
} else if (block[0] === 'goBackByLayers:') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) {\n';
source += ' self.children.splice(i, 1);\n';
source += ' self.children.splice(Math.max(0, i - ' + num(block[1]) + '), 0, S);\n';
source += '}\n';
// } else if (block[0] === 'setVideoState') {
// } else if (block[0] === 'setVideoTransparency') {
// } else if (block[0] === 'playSound:') { /* Sound */
// } else if (block[0] === 'doPlaySoundAndWait') {
// } else if (block[0] === 'stopAllSounds') {
// } else if (block[0] === 'drum:duration:elapsed:from:') {
// } else if (block[0] === 'playDrum') {
// } else if (block[0] === 'rest:elapsed:from:') {
// } else if (block[0] === 'noteOn:duration:elapsed:from:') {
// } else if (block[0] === 'midiInstrument:') {
// } else if (block[0] === 'instrument:') {
// } else if (block[0] === 'changeVolumeBy:') {
// } else if (block[0] === 'setVolumeTo:') {
} else if (block[0] === 'changeTempoBy:') {
source += 'self.tempoBPM += ' + num(block[1]) + ';\n';
} else if (block[0] === 'setTempoTo:') {
source += 'self.tempoBPM = ' + num(block[1]) + ';\n';
} else if (block[0] === 'clearPenTrails') { /* Pen */
source += 'self.penCanvas.width = 480 * self.maxZoom;\n';
source += 'self.penContext.scale(self.maxZoom, self.maxZoom);\n';
} else if (block[0] === 'putPenDown') {
source += 'S.isPenDown = true;\n';
source += 'S.dotPen();\n';
} else if (block[0] === 'putPenUp') {
source += 'S.isPenDown = false;\n';
source += 'S.penState = null;\n';
} else if (block[0] === 'penColor:') {
source += 'var hsl = rgb2hsl(' + num(block[1]) + ');\n';
source += 'S.penHue = hsl[0];\n';
source += 'S.penSaturation = hsl[1];\n';
source += 'S.penLightness = hsl[2];\n';
} else if (block[0] === 'setPenHueTo:') {
source += 'S.penHue = ' + num(block[1]) + ' * 360 / 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'changePenHueBy:') {
source += 'S.penHue += ' + num(block[1]) + ' * 360 / 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'setPenShadeTo:') {
source += 'S.penLightness = ' + num(block[1]) + ' % 200;\n';
source += 'if (S.penLightness < 0) S.penLightness += 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'changePenShadeBy:') {
source += 'S.penLightness = (S.penLightness + ' + num(block[1]) + ') % 200;\n';
source += 'if (S.penLightness < 0) S.penLightness += 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'penSize:') {
source += 'S.penSize = ' + num(block[1]) + ';\n';
} else if (block[0] === 'changePenSizeBy:') {
source += 'S.penSize += ' + num(block[1]) + ';\n';
} else if (block[0] === 'stampCostume') {
source += 'S.draw(self.penContext);\n';
} else if (block[0] === 'setVar:to:') { /* Data */
source += 'if (S.varRefs[' + val(block[1]) + ']) S.varRefs[' + val(block[1]) + '].value = ' + val(block[2]) + ';\n';
} else if (block[0] === 'changeVar:by:') {
source += 'if (S.varRefs[' + val(block[1]) + ']) S.varRefs[' + val(block[1]) + '].value = (Number(S.varRefs[' + val(block[1]) + '].value) || 0) + ' + num(block[2]) + ';\n';
} else if (block[0] === 'append:toList:') {
source += 'appendToList(' + val(block[2]) + ', ' + val(block[1]) + ');\n';
} else if (block[0] === 'deleteLine:ofList:') {
source += 'deleteLineOfList(' + val(block[2]) + ', ' + val(block[1]) + ');\n';
} else if (block[0] === 'insert:at:ofList:') {
source += 'insertInList(' + val(block[3]) + ', ' + val(block[2]) + ', '+ val(block[1]) + ');\n';
} else if (block[0] === 'setLine:ofList:to:') {
source += 'setLineOfList(' + val(block[2]) + ', ' + val(block[1]) + ', '+ val(block[3]) + ');\n';
} else if (block[0] === 'showVariable:') {
source += 'showVariable(' + val(block[1]) + ', true);';
} else if (block[0] === 'hideVariable:') {
source += 'showVariable(' + val(block[1]) + ', false);';
// } else if (block[0] === 'showList:') {
// } else if (block[0] === 'hideList:') {
} else if (block[0] === 'broadcast:') { /* Control */
source += 'broadcast(' + val(block[1]) + ');';
} else if (block[0] === 'call') {
source += 'call(' + val(block[1]) + ', ' + (warp ? null : nextLabel()) + ', [';
for (var i = 2; i < block.length; i++) {
if (i > 2) {
source += ', ';
}
source += val(block[i]);
}
source += ']);\n';
if (!warp) delay();
} else if (block[0] === 'doBroadcastAndWait') {
source += 'R.threads = broadcast(' + val(block[1]) + ');\n';
var id = label();
source += 'if (running(R.threads)) {\n';
queue(id);
source += '}\n';
} else if (block[0] === 'doForever') {
var id = label();
seq(block[1]);
queue(id);
} else if (block[0] === 'doForeverIf') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
var id = label();
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
queue(id);
}
// } else if (block[0] === 'doForLoop') {
} else if (block[0] === 'doIf') {
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else if (block[0] === 'doIfElse') {
source += 'if (' + bool(block[1]) + ') {';
seq(block[2]);
source += '} else {';
seq(block[3]);
source += '}';
} else if (block[0] === 'doRepeat') {
source += 'save();\n';
source += 'R.count = ' + num(block[1]) + ';\n';
if (warp) {
source += 'while (R.count > 0) {\n';
source += ' R.count -= 1;\n';
seq(block[2]);
source += '}\n';
source += 'restore();\n';
} else {
var id = label();
source += 'if (R.count > 0) {\n';
source += ' R.count -= 1;\n';
seq(block[2]);
queue(id);
source += '} else {\n';
source += ' restore();\n';
source += '}\n';
}
} else if (block[0] === 'doReturn') {
source += 'endCall();\n';
source += 'return;\n';
} else if (block[0] === 'doUntil') {
if (warp) {
source += 'while (!' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else {
var id = label();
source += 'if (!' + bool(block[1]) + ') {\n';
seq(block[2]);
queue(id);
source += '}\n';
}
} else if (block[0] === 'doWhile') {
if (warp) {
source += 'while (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else {
var id = label();
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
queue(id);
source += '}\n';
}
} else if (block[0] === 'doWaitUntil') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
var id = label();
source += 'if (!' + bool(block[1]) + ') {\n';
queue(id);
source += '}\n';
}
} else if (block[0] === 'glideSecs:toX:y:elapsed:from:') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
source += 'save();\n';
source += 'R.start = self.now();\n';
source += 'R.duration = ' + num(block[1]) + ';\n';
source += 'R.baseX = S.scratchX;\n';
source += 'R.baseY = S.scratchY;\n';
source += 'R.deltaX = ' + num(block[2]) + ' - S.scratchX;\n';
source += 'R.deltaY = ' + num(block[3]) + ' - S.scratchY;\n';
var id = label();
source += 'var f = (self.now() - R.start) / (R.duration * 1000);\n';
source += 'if (f > 1) f = 1;\n';
source += 'S.moveTo(R.baseX + f * R.deltaX, R.baseY + f * R.deltaY);\n';
source += 'if (f < 1) {\n';
queue(id);
source += '}\n';
source += 'restore();\n';
}
} else if (block[0] === 'stopAll') {
source += 'self.stopAll();\n';
source += 'TERMINATE = true;\n';
source += 'return;\n';
} else if (block[0] === 'stopScripts') {
source += 'switch (' + val(block[1]) + ') {\n';
source += ' case "all":\n';
source += ' self.stopAll();\n';
source += ' TERMINATE = true;\n';
source += ' return;\n';
source += ' case "this script":\n';
source += ' endCall();\n';
source += ' return;\n';
source += ' case "other scripts in sprite":\n';
source += ' case "other scripts in stage":\n';
source += ' S.queue = [];\n';
source += ' TERMINATE = true;\n';
source += ' break;\n';
source += '}\n';
} else if (block[0] === 'wait:elapsed:from:') {
source += 'save();\n';
source += 'R.start = self.now();\n';
source += 'R.duration = ' + num(block[1]) + ';\n';
var id = label();
source += 'if (self.now() - R.start < R.duration * 1000) {\n';
queue(id);
source += '}\n';
source += 'restore();\n';
} else if (block[0] === 'warpSpeed') {
warp += 1;
seq(block[1]);
warp -= 1;
} else if (block[0] === 'createCloneOf') {
source += 'clone(' + val(block[1]) + ');\n';
} else if (block[0] === 'deleteClone') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) self.children.splice(i, 1);\n';
source += 'S.queue = [];\n';
source += 'TERMINATE = true;\n';
source += 'return;\n';
} else if (block[0] === 'doAsk') { /* Sensing */
source += 'R.id = self.nextPromptId++;\n';
var id = label();
source += 'if (self.promptId < R.id) {\n';
queue(id);
source += '}\n';
source += 'S.ask(' + val(block[1]) + ');';
var id = label();
source += 'if (self.promptId === R.id) {\n';
queue(id);
source += '}\n';
} else if (block[0] === 'timerReset') {
source += 'self.timerStart = self.now();\n';
} else {
warn('Undefined command: ' + block[0]);
}
};
var source = '';
var startfn = object.fns.length;
var fns = [0];
var warp = 0;
if (script[0][0] === 'procDef') {
warp += 1;
}
for (var i = 1; i < script.length; i++) {
compile(script[i]);
}
if (script[0][0] === 'procDef') {
source += 'endCall();\n';
source += 'return;\n';
}
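// Turn the source tail starting at a label offset into a callable function.
// The scanner drops closing braces and else-branches whose opening brace lies
// before the slice, so the tail remains syntactically valid JavaScript.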
var createContinuation = function(source) {
var result = '(function() {\n';
var brackets = 0;
var delBrackets = 0;
var shouldDelete = false;
for (var i = 0; i < source.length; i++) {
if (shouldDelete) {
if (source[i] === '{') {
delBrackets += 1;
} else if (source[i] === '}') {
delBrackets -= 1;
if (delBrackets === 0) {
shouldDelete = false;
}
}
} else {
if (source.substr(i, 8) === '} else {') {
if (brackets > 0) {
result += '} else {';
i += 7;
} else {
shouldDelete = true;
delBrackets = 0;
}
} else if (source[i] === '{') {
brackets += 1;
result += '{';
} else if (source[i] === '}') {
if (brackets > 0) {
result += '}';
brackets -= 1;
}
} else {
result += source[i];
}
}
}
result += '})';
try {
return P.runtime.scopedEval(result);
} catch (e) {
console.error(e);
debugger;
}
};
for (var i = 0; i < fns.length; i++) {
object.fns.push(createContinuation(source.slice(fns[i])));
}
var f = object.fns[startfn];
if (script[0][0] === 'whenClicked') {
object.listeners.whenClicked.push(f);
} else if (script[0][0] === 'whenGreenFlag') {
object.listeners.whenGreenFlag.push(f);
} else if (script[0][0] === 'whenCloned') {
object.listeners.whenCloned.push(f);
} else if (script[0][0] === 'whenIReceive') {
var key = script[0][1].toLowerCase();
(object.listeners.whenIReceive[key] || (object.listeners.whenIReceive[key] = [])).push(f);
} else if (script[0][0] === 'whenKeyPressed') {
object.listeners.whenKeyPressed[P.getKeyCode(script[0][1])].push(f);
} else if (script[0][0] === 'whenSceneStarts') {
var key = script[0][1].toLowerCase();
(object.listeners.whenSceneStarts[key] || (object.listeners.whenSceneStarts[key] = [])).push(f);
} else if (script[0][0] === 'procDef') {
object.procedures[script[0][1]] = {
inputs: script[0][2],
fn: f
};
} else {
warn('Undefined event: ' + script[0][0]);
}
};
return function(stage) {
warnings = Object.create(null);
compileScripts(stage);
for (var i = 0; i < stage.children.length; i++) {
if (!stage.children[i].cmd) {
compileScripts(stage.children[i]);
}
}
for (var key in warnings) {
console.warn(key + (warnings[key] > 1 ? ' (repeated ' + warnings[key] + ' times)' : ''));
}
};
}());
P.runtime = (function() {
'use strict';
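// Interpreter state shared by all compiled continuations: self = the stage,
// S = the sprite currently executing, R = scratch state for blocking
// primitives, STACK = saved R frames, C/CALLS = current call frame and call
// stack, BASE = the script's entry function, THREAD = index into S.queue,
// TERMINATE = stop flag, STOP_THREAD = sentinel thrown to abort a thread.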
var self, S, R, STACK, C, CALLS, BASE, THREAD, TERMINATE, STOP_THREAD = {};
var bool = function(v) {
return Number(v) !== 0 && v !== '' && v !== 'false' && v !== false;
};
var compare = function(x, y) {
var nx = Number(x);
var ny = Number(y);
if (nx === nx && ny === ny) {
return nx < ny ? -1 : nx === ny ? 0 : 1;
}
var xs = String(x);
var ys = String(y);
return xs < ys ? -1 : xs === ys ? 0 : 1;
};
var mod = function(x, y) {
var r = x % y;
if (r / y < 0) {
r += y;
}
return r;
};
var random = function(x, y) {
x = Number(x) || 0;
y = Number(y) || 0;
if (x > y) {
var tmp = y;
y = x;
x = tmp;
}
if (x % 1 === 0 && y % 1 === 0) {
return Math.floor(Math.random() * (y - x + 1)) + x;
}
return Math.random() * (y - x) + x;
};
var rgb2hsl = function(rgb) {
var r = (rgb >> 16 & 0xff) / 0xff;
var g = (rgb >> 8 & 0xff) / 0xff;
var b = (rgb & 0xff) / 0xff;
var min = Math.min(r, g, b);
var max = Math.max(r, g, b);
if (min === max) {
return [0, 0, r * 100];
}
var c = max - min;
var l = (min + max) / 2;
var s = c / (1 - Math.abs(2 * l - 1));
var h;
switch (max) {
case r: h = ((g - b) / c + 6) % 6; break;
case g: h = (b - r) / c + 2; break;
case b: h = (r - g) / c + 4; break;
}
h *= 60;
return [h, s * 100, l * 100];
};
var clone = function(name) {
var parent = name === '_myself_' ? S : self.getObject(name);
var c = parent.clone();
self.children.splice(self.children.indexOf(parent), 0, c);
self.triggerFor(c, 'whenCloned');
};
var epoch = Date.UTC(2000, 0, 1);
var timeAndDate = P.Watcher.prototype.timeAndDate = function(format) {
switch (format) {
case 'year':
return new Date().getFullYear();
case 'month':
return new Date().getMonth() + 1;
case 'date':
return new Date().getDate();
case 'day of week':
return new Date().getDay() + 1;
case 'hour':
return new Date().getHours();
case 'minute':
return new Date().getMinutes();
case 'second':
return new Date().getSeconds();
}
return 0;
};
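// Resolve a Scratch list index (1-based, or 'last'/'random'/'any') to a
// zero-based index, returning -1 when it is out of range.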
var listIndex = function(list, index, length) {
if (index === 'random' || index === 'any') {
return Math.floor(Math.random() * length);
}
if (index === 'last') {
return length - 1;
}
var i = Math.floor(index) - 1;
return i === i && i >= 0 && i < length ? i : -1;
};
var contentsOfList = function(name) {
var list = S.listRefs[name];
if (!list) return '';
var isSingle = true;
for (var i = 0; i < list.contents.length; i++) {
if (list.contents[i].length !== 1) {
isSingle = false;
break;
}
}
return list.contents.join(isSingle ? '' : ' ');
};
var getLineOfList = function(name, index) {
var list = S.listRefs[name];
if (!list) return 0;
var i = listIndex(list, index, list.contents.length);
return list && i > -1 ? list.contents[i] : 0;
};
var lineCountOfList = function(name) {
var list = S.listRefs[name];
return list ? list.contents.length : 0;
};
var listContains = function(name, value) {
var list = S.listRefs[name];
return list ? list.contents.indexOf(value) > -1 : 0;
};
var appendToList = function(name, value) {
var list = S.listRefs[name];
if (list) {
list.contents.push(value);
}
};
var deleteLineOfList = function(name, index) {
var list = S.listRefs[name];
if (list) {
if (index === 'all') {
list.contents = [];
} else {
var i = listIndex(list, index, list.contents.length);
if (i > -1) {
list.contents.splice(i, 1);
}
}
}
};
var insertInList = function(name, index, value) {
var list = S.listRefs[name];
if (list) {
var i = listIndex(list, index, list.contents.length + 1);
if (i === list.contents.length) {
list.contents.push(value);
} else if (i > -1) {
list.contents.splice(i, 0, value);
}
}
};
var setLineOfList = function(name, index, value) {
var list = S.listRefs[name];
if (list) {
var i = listIndex(list, index, list.contents.length);
if (i > -1) {
list.contents[i] = value;
}
}
};
var mathFunc = function(f, x) {
switch (f) {
case 'abs':
case 'floor':
case 'sqrt':
return Math[f](x);
case 'ceiling':
return Math.ceil(x);
case 'cos':
x = 90 - x;
case 'sin':
// 0 <= x <= 45 for degrees->radians to work well
var neg = false;
x = x % 360;
if (x < 0) x += 360;
if (x > 180) {
neg = !neg;
x -= 180;
}
if (x > 90) {
x = 180 - x;
}
var z = x > 45 ?
Math.cos((90 - x) * Math.PI / 180) :
Math.sin(x * Math.PI / 180);
return neg ? -z : z;
case 'tan':
x = x % 180;
if (x < 0) x += 180;
return x > 90 ?
-Math.tan((90 - x) * Math.PI / 180) :
Math.tan(x * Math.PI / 180);
case 'asin':
case 'acos':
case 'atan':
return Math[f](x) * 180 / Math.PI;
case 'ln':
return Math.log(x);
case 'log':
return Math.log(x) / Math.LN10;
case 'e ^':
return Math.exp(x);
case '10 ^':
return Math.exp(x * Math.LN10);
}
return 0;
};
var showVariable = function(name, visible) {
var ref = S.varRefs[name];
if (ref) {
if (!ref.watcher) {
ref.watcher = new P.Watcher(self);
ref.watcher.x = self.defaultWatcherX;
ref.watcher.y = self.defaultWatcherY;
self.defaultWatcherY += 26;
if (self.defaultWatcherY >= 450) {
self.defaultWatcherY = 10;
self.defaultWatcherX += 150;
}
ref.watcher.target = S.variables.indexOf(ref) !== -1 ? S : self;
ref.watcher.label = (ref.watcher.target === self ? '' : ref.watcher.target.objName + ': ') + name;
ref.watcher.param = name;
self.children.push(ref.watcher);
}
ref.watcher.visible = visible;
}
};
var attribute = function(attr, objName) {
var o = self.getObject(objName);
if (!o) return 0;
if (o.isSprite) {
switch (attr) {
case 'x position': return o.scratchX;
case 'y position': return o.scratchY;
case 'direction': return o.direction;
case 'costume #': return o.currentCostumeIndex + 1;
case 'costume name': return o.costumes[o.currentCostumeIndex].costumeName;
case 'size': return o.scale * 100;
case 'volume': return 0; // TODO
}
} else {
switch (attr) {
case 'background #':
case 'backdrop #': return o.currentCostumeIndex + 1;
case 'backdrop name': return o.costumes[o.currentCostumeIndex].costumeName;
case 'volume': return 0; // TODO
}
}
var ref = o.varRefs[attr];
if (ref) {
return ref.value;
}
return 0;
};
var save = function() {
STACK.push(R);
R = {};
};
var restore = function() {
R = STACK.pop();
};
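// Invoke a custom block: push the current call frame, give the procedure its
// own stack and arguments, and remember the continuation (S.fns[id]) to run
// when endCall() unwinds. Unknown procedures fall through to the continuation.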
var call = function(spec, id, values) {
var procedure = S.procedures[spec];
if (procedure) {
var args = {};
for (var i = 0; i < values.length; i++) {
args[procedure.inputs[i]] = values[i];
}
STACK.push(R);
CALLS.push(C);
C = {
fn: S.fns[id],
args: args,
stack: STACK = []
};
R = {};
procedure.fn();
} else {
S.fns[id]();
}
};
var endCall = function() {
if (CALLS.length) {
var fn = C.fn;
C = CALLS.pop();
STACK = C.stack;
R = STACK.pop();
if (fn != null) fn();
} else {
throw STOP_THREAD;
}
};
var sceneChange = function() {
return self.trigger('whenSceneStarts', self.costumes[self.currentCostumeIndex].costumeName);
};
var broadcast = function(name) {
return self.trigger('whenIReceive', name);
};
var running = function(bases) {
for (var j = 0; j < self.queue.length; j++) {
if (self.queue[j] && bases.indexOf(self.queue[j].base) !== -1) return true;
}
for (var i = 0; i < self.children.length; i++) {
var c = self.children[i];
if (c.isSprite) {
for (var j = 0; j < c.queue.length; j++) {
if (c.queue[j] && bases.indexOf(c.queue[j].base) !== -1) return true;
}
}
}
return false;
};
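// Park the current thread; runFor() resumes the saved continuation on a later
// frame.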
var queue = function(id) {
S.queue[THREAD] = {
base: BASE,
fn: S.fns[id],
calls: CALLS
};
};
// Internal definition
(function() {
'use strict';
P.Stage.prototype.framerate = 30;
P.Base.prototype.initRuntime = function() {
this.queue = [];
};
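// Start (or restart) the script identified by "base". If the same script is
// already queued it is replaced, and a script that restarts itself aborts the
// current execution by throwing STOP_THREAD.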
P.Base.prototype.startThread = function(base) {
var thread = {
base: base,
fn: base,
calls: [{ args:{}, stack: [{}] }]
};
for (var i = 0; i < this.queue.length; i++) {
if (this.queue[i] && this.queue[i].base === base) {
this.queue[i] = thread;
if (S === this && THREAD === i) {
throw STOP_THREAD;
}
return;
}
}
this.queue.push(thread);
};
P.Stage.prototype.triggerFor = function(sprite, event, arg) {
var threads;
if (event === 'whenClicked') {
threads = sprite.listeners.whenClicked;
} else if (event === 'whenCloned') {
threads = sprite.listeners.whenCloned;
} else if (event === 'whenGreenFlag') {
threads = sprite.listeners.whenGreenFlag;
} else if (event === 'whenIReceive') {
threads = sprite.listeners.whenIReceive[arg.toLowerCase()];
} else if (event === 'whenKeyPressed') {
threads = sprite.listeners.whenKeyPressed[arg];
} else if (event === 'whenSceneStarts') {
threads = sprite.listeners.whenSceneStarts[arg.toLowerCase()];
}
if (threads) {
for (var i = 0; i < threads.length; i++) {
sprite.startThread(threads[i]);
}
return threads;
}
return [];
};
P.Stage.prototype.trigger = function(event, arg) {
var result = this.triggerFor(this, event, arg);
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].isSprite) {
result = result.concat(this.triggerFor(this.children[i], event, arg));
}
}
return result;
};
P.Stage.prototype.triggerGreenFlag = function() {
this.timerStart = this.now();
this.trigger('whenGreenFlag');
};
P.Stage.prototype.start = function() {
this.isRunning = true;
if (this.interval) return;
this.baseTime = Date.now();
this.interval = setInterval(this.step.bind(this), 1000 / this.framerate);
};
P.Stage.prototype.pause = function() {
if (this.interval) {
this.baseNow = this.now();
clearInterval(this.interval);
delete this.interval;
}
this.isRunning = false;
};
P.Stage.prototype.stopAll = function() {
this.hidePrompt = false;
this.prompter.style.display = 'none';
this.promptId = this.nextPromptId = 0;
this.queue = [];
this.resetFilters();
for (var i = 0; i < this.children.length; i++) {
var c = this.children[i];
if (c.isClone) {
this.children.splice(i, 1);
i -= 1;
} else if (c.isSprite) {
c.queue = [];
c.resetFilters();
}
}
};
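// Run every queued thread of one sprite for a single step: restore its saved
// call frame, execute until the next yield, then keep whatever the thread
// re-queued for the following frame.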
P.Stage.prototype.runFor = function(sprite) {
S = sprite;
var queue = sprite.queue;
TERMINATE = false;
for (THREAD = 0; THREAD < queue.length; THREAD++) {
if (queue[THREAD]) {
var fn = queue[THREAD].fn;
BASE = queue[THREAD].base;
CALLS = queue[THREAD].calls;
C = CALLS.pop();
STACK = C.stack;
R = STACK.pop();
queue[THREAD] = undefined;
try {
fn();
} catch (e) {
if (e !== STOP_THREAD) throw e;
queue[THREAD] = undefined;
continue;
}
STACK.push(R);
CALLS.push(C);
if (TERMINATE) return;
}
}
for (var i = queue.length; i--;) {
if (!queue[i]) queue.splice(i, 1);
}
};
P.Stage.prototype.now = function() {
return this.baseNow + Date.now() - this.baseTime;
};
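// One animation frame: run every sprite's threads, then the stage's, looping
// within the frame budget when turbo mode is on, and finally redraw.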
P.Stage.prototype.step = function() {
try {
self = this;
var start = Date.now();
do {
var children = this.children.slice(0);
for (var i = 0; i < children.length; i++) {
if (children[i].isSprite) {
this.runFor(children[i]);
}
}
this.runFor(this);
} while (self.isTurbo && Date.now() - start < 1000 / this.framerate);
this.draw();
S = null;
} catch (e) {
this.handleError(e);
clearInterval(this.interval);
}
};
P.Stage.prototype.handleError = function(e) {
console.error(e.stack);
};
}());
return {
scopedEval: function(source) {
return eval(source);
}
};
}());
| phosphorus.js | var P = (function() {
'use strict';
var hasOwnProperty = {}.hasOwnProperty;
var hasTouchEvents = 'ontouchstart' in document;
var inherits = function(cla, sup) {
cla.prototype = Object.create(sup.prototype);
cla.parent = sup;
cla.base = function(self, method /*, args... */) {
return sup.prototype[method].call(self, [].slice.call(arguments, 2));
};
};
var addEvents = function(cla /*, events... */) {
[].slice.call(arguments, 1).forEach(function(event) {
addEvent(cla, event);
});
};
var addEvent = function(cla, event) {
var capital = event[0].toUpperCase() + event.substr(1);
cla.prototype.addEventListener = cla.prototype.addEventListener || function(event, listener) {
var listeners = this['$' + event] = this['$' + event] || [];
listeners.push(listener);
return this;
};
cla.prototype.removeEventListener = cla.prototype.removeEventListener || function(event, listener) {
var listeners = this['$' + event];
if (listeners) {
var i = listeners.indexOf(listener);
if (i > -1) {
listeners.splice(i, 1);
}
}
return this;
};
cla.prototype.dispatchEvent = cla.prototype.dispatchEvent || function(event, arg) {
var listeners = this['$' + event];
if (listeners) {
listeners.forEach(function(listener) {
listener(arg);
});
}
var listener = this['on' + event];
if (listener) {
listener(arg);
}
return this;
};
cla.prototype['on' + capital] = function(listener) {
this.addEventListener(event, listener);
return this;
};
cla.prototype['dispatch' + capital] = function(arg) {
this.dispatchEvent(event, arg);
return this;
};
};
var Request = function() {
this.loaded = 0;
};
addEvents(Request, 'load', 'progress', 'error');
Request.prototype.progress = function(loaded, total, lengthComputable) {
this.loaded = loaded;
this.total = total;
this.lengthComputable = lengthComputable;
this.dispatchProgress({
loaded: loaded,
total: total,
lengthComputable: lengthComputable
});
};
Request.prototype.load = function(result) {
this.result = result;
this.isDone = true;
this.dispatchLoad(result);
};
Request.prototype.error = function(error) {
this.result = error;
this.isError = true;
this.isDone = true;
this.dispatchError(error);
};
var CompositeRequest = function() {
this.requests = [];
this.isDone = true;
this.update = this.update.bind(this);
this.error = this.error.bind(this);
};
inherits(CompositeRequest, Request);
CompositeRequest.prototype.add = function(request) {
if (request instanceof CompositeRequest) {
for (var i = 0; i < request.requests.length; i++) {
this.add(request.requests[i]);
}
} else {
this.requests.push(request);
request.addEventListener('progress', this.update);
request.addEventListener('load', this.update);
request.addEventListener('error', this.error);
this.update();
}
};
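// Recompute aggregate progress over all child requests. Requests with an
// unknown length get a rough estimate based on the ones that do report a
// total; the composite fires "load" once every child has finished, unless
// loading is deferred.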
CompositeRequest.prototype.update = function() {
if (this.isError) return;
var requests = this.requests;
var i = requests.length;
var total = 0;
var loaded = 0;
var lengthComputable = true;
var uncomputable = 0;
var done = 0;
while (i--) {
var r = requests[i];
loaded += r.loaded;
if (r.isDone) {
total += r.loaded;
done += 1;
} else if (r.lengthComputable) {
total += r.total;
} else {
lengthComputable = false;
uncomputable += 1;
}
}
if (!lengthComputable && uncomputable !== requests.length) {
var each = total / (requests.length - uncomputable) * uncomputable;
i = requests.length;
total = 0;
loaded = 0;
lengthComputable = true;
while (i--) {
var r = requests[i];
if (r.lengthComputable) {
loaded += r.loaded;
total += r.total;
} else {
total += each;
if (r.isDone) loaded += each;
}
}
}
this.progress(loaded, total, lengthComputable);
this.doneCount = done;
this.isDone = done === requests.length;
if (this.isDone && !this.defer) {
this.load(this.getResult());
}
};
CompositeRequest.prototype.getResult = function() {
throw new Error('Subclasses must implement getResult()');
};
var IO = {};
IO.BASE_URL = 'http://scratch.mit.edu/internalapi/';
IO.PROJECT_URL = IO.BASE_URL + 'project/';
IO.ASSET_URL = IO.BASE_URL + 'asset/';
IO.PROXY_URL = 'proxy.php?u=';
IO.init = function(request) {
IO.projectRequest = request;
IO.zip = null;
IO.costumes = null;
IO.images = null;
};
IO.load = function(url, callback, self) {
var request = new Request;
var xhr = new XMLHttpRequest;
xhr.open('GET', IO.PROXY_URL + encodeURIComponent(url), true);
xhr.onprogress = function(e) {
request.progress(e.loaded, e.total, e.lengthComputable);
};
xhr.onload = function() {
if (xhr.status === 200) {
request.load(xhr.responseText);
} else {
request.error(new Error('HTTP ' + xhr.status + ': ' + xhr.statusText));
}
};
xhr.onerror = function() {
request.error(new Error('XHR Error'));
};
setTimeout(xhr.send.bind(xhr));
if (callback) request.onLoad(callback.bind(self));
return request;
};
IO.loadImage = function(url, callback, self) {
var request = new Request;
var image = new Image;
image.src = url;
image.onload = function() {
request.load(image);
};
image.onerror = function() {
request.error(new Error('Failed to load image'));
};
// var xhr = new XMLHttpRequest;
// xhr.open('GET', IO.PROXY_URL + encodeURIComponent(url), true);
// xhr.responseType = 'blob';
// xhr.onprogress = function(e) {
// request.progress(e.loaded, e.total, e.lengthComputable);
// };
// xhr.onload = function(e) {
// if (xhr.status === 200) {
// var reader = new FileReader;
// reader.addEventListener('loadend', function() {
// var image = new Image;
// image.src = reader.result;
// image.onload = function() {
// request.load(image);
// };
// });
// reader.readAsDataURL(xhr.response);
// } else {
// request.error(new Error('HTTP ' + xhr.status + ': ' + xhr.statusText));
// }
// };
// xhr.onerror = function() {
// request.error(new Image('Failed to load image'));
// };
// xhr.send();
if (callback) request.onLoad(callback.bind(self));
return request;
};
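// Fetch a project by id from the Scratch site (through the proxy), parse its
// JSON, queue all referenced assets, and resolve with a Stage once everything
// has loaded.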
IO.loadScratchr2Project = function(id, callback, self) {
var request = new CompositeRequest;
IO.init(request);
request.defer = true;
request.add(IO.load(IO.PROJECT_URL + id + '/get/?' + Math.random().toString().slice(2)).onLoad(function(contents) {
try {
var json = JSON.parse(contents);
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
}));
return request;
};
IO.loadScratchr2ProjectTitle = function(id, callback, self) {
var request = new CompositeRequest;
request.defer = true;
request.add(P.IO.load('http://scratch.mit.edu/projects/' + id + '/').onLoad(function(data) {
var m = /<title>\s*(.+?)(\s+on\s+Scratch)?\s*<\/title>/.exec(data);
if (callback) request.onLoad(callback.bind(self));
if (m) {
var d = document.createElement('div');
d.innerHTML = m[1];
request.load(d.innerText);
} else {
request.error(new Error('No title'));
}
}));
return request;
};
IO.loadJSONProject = function(json, callback, self) {
var request = new CompositeRequest;
IO.init(request);
try {
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
return request;
};
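  // Parses an .sb2 archive from an ArrayBuffer: project.json plus numbered image
  // assets inside the zip. Numbering starts at 1 so the pen-trails layer (0) is skipped.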
IO.loadSB2Project = function(ab, callback, self) {
var request = new CompositeRequest;
IO.init(request);
try {
IO.zip = new JSZip(ab);
var json = JSON.parse(IO.zip.file('project.json').asText());
IO.images = 1; // ignore pen trails
IO.sounds = 0;
IO.loadProject(json);
if (callback) request.onLoad(callback.bind(self));
if (request.isDone) {
request.load(new Stage().fromJSON(json));
} else {
request.defer = false;
request.getResult = function() {
return new Stage().fromJSON(json);
};
}
} catch (e) {
request.error(e);
}
return request;
};
IO.loadSB2File = function(f, callback, self) {
var cr = new CompositeRequest;
cr.defer = true;
var request = new Request;
cr.add(request);
var reader = new FileReader;
reader.onloadend = function() {
cr.defer = true;
cr.add(IO.loadSB2Project(reader.result, function(result) {
cr.defer = false;
cr.getResult = function() {
return result;
};
cr.update();
}));
request.load();
};
reader.onprogress = function(e) {
request.progress(e.loaded, e.total, e.lengthComputable);
};
reader.readAsArrayBuffer(f);
if (callback) cr.onLoad(callback.bind(self));
return cr;
};
IO.loadProject = function(data) {
IO.loadArray(data.children, IO.loadObject);
IO.loadBase(data);
};
IO.loadBase = function(data) {
data.scripts = data.scripts || [];
data.costumes = IO.loadArray(data.costumes, IO.loadCostume);
data.sounds = IO.loadArray(data.sounds, IO.loadSound);
data.variables = data.variables || [];
data.lists = data.lists || [];
};
IO.loadArray = function(data, process) {
if (!data) return [];
for (var i = 0; i < data.length; i++) {
process(data[i]);
}
return data;
};
IO.loadObject = function(data) {
if (!data.cmd && !data.listName) {
IO.loadBase(data);
}
};
IO.loadCostume = function(data) {
IO.loadMD5(data.baseLayerMD5, function(asset) {
data.$image = asset;
});
if (data.textLayerMD5) {
IO.loadMD5(data.textLayerMD5, function(asset) {
data.$text = asset;
});
}
};
IO.loadSound = function() {
// TODO
};
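  // Resolves a costume layer by its md5 file name. SVGs are rasterised onto a
  // canvas with canvg; bitmaps come either from the open .sb2 zip or from the
  // asset server via the proxy.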
IO.loadMD5 = function(md5, callback, zip, index) {
var ext = md5.split('.').pop();
if (ext === 'svg') {
var cb = function(source) {
var canvas = document.createElement('canvas');
var context = canvas.getContext('2d');
var image = new Image;
callback(image);
canvg(canvas, source, {
ignoreMouse: true,
ignoreAnimation: true,
ignoreClear: true,
renderCallback: function() {
image.src = canvas.toDataURL();
}
})
};
if (IO.zip) {
var image = IO.images;
IO.images += 1;
cb(IO.zip.file(image + '.svg').asText());
} else {
IO.projectRequest.add(IO.load(IO.ASSET_URL + md5 + '/get/', cb));
}
} else {
if (IO.zip) {
var image = IO.images;
IO.images += 1;
var request = new Request;
var f = IO.zip.file(image + '.' + ext).asBinary();
var img = new Image;
img.onload = function() {
if (callback) callback(img);
request.load();
};
img.src = 'data:image/' + (ext === 'jpg' ? 'jpeg' : ext) + ';base64,' + btoa(f);
IO.projectRequest.add(request);
} else {
IO.projectRequest.add(
IO.loadImage(IO.PROXY_URL + encodeURIComponent(IO.ASSET_URL + md5 + '/get/'), function(result) {
callback(result);
}));
}
}
};
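  // Base: state shared by Stage and Sprite, such as costumes, variable and list
  // references, event listener tables, and graphic filters.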
var Base = function() {
this.isClone = false;
this.costumes = [];
this.currentCostumeIndex = 0;
this.objName = '';
this.sounds = [];
this.varRefs = {};
this.listRefs = {};
this.procedures = {};
this.listeners = {
whenClicked: [],
whenCloned: [],
whenGreenFlag: [],
whenIReceive: {},
whenKeyPressed: [],
whenSceneStarts: [],
whenSensorGreaterThan: []
};
for (var i = 0; i < 256; i++) {
this.listeners.whenKeyPressed.push([]);
}
this.fns = [];
this.scripts = [];
this.filters = {
color: 0,
fisheye: 0,
whirl: 0,
pixelate: 0,
mosaic: 0,
brightness: 0,
ghost: 0
};
this.initRuntime();
};
Base.prototype.fromJSON = function(data) {
this.objName = data.objName;
this.scripts = data.scripts;
this.currentCostumeIndex = data.currentCostumeIndex || 0;
this.costumes = data.costumes.map(function(d) {
return new Costume(d);
});
// this.sounds = data.sounds.map(function(d) {
// return new Sound(d);
// });
this.addLists(this.lists = data.lists);
this.addVariables(this.variables = data.variables);
return this;
};
Base.prototype.addVariables = function(variables) {
for (var i = 0; i < variables.length; i++) {
      if (variables[i].isPersistent) {
throw new Error('Cloud variables are not supported');
}
this.varRefs[variables[i].name] = variables[i];
}
};
Base.prototype.addLists = function(lists) {
for (var i = 0; i < lists.length; i++) {
      if (lists[i].isPersistent) {
throw new Error('Cloud lists are not supported');
}
this.listRefs[lists[i].listName] = lists[i];
// TODO list watchers
}
};
Base.prototype.showNextCostume = function() {
this.currentCostumeIndex = (this.currentCostumeIndex + 1) % this.costumes.length;
};
Base.prototype.showPreviousCostume = function() {
var length = this.costumes.length;
this.currentCostumeIndex = (this.currentCostumeIndex + length - 1) % length;
};
Base.prototype.getCostumeName = function() {
    return this.costumes[this.currentCostumeIndex] ? this.costumes[this.currentCostumeIndex].costumeName : '';
};
Base.prototype.setCostume = function(costume) {
if (typeof costume !== 'number') {
costume = '' + costume;
for (var i = 0; i < this.costumes.length; i++) {
if (this.costumes[i].costumeName === costume) {
this.currentCostumeIndex = i;
return;
}
}
}
i = (Math.floor(Number(costume) || 0) - 1) % this.costumes.length;
if (i < 0) i += this.costumes.length;
this.currentCostumeIndex = i;
};
Base.prototype.setFilter = function(name, value) {
var min = 0;
var max = 100;
switch (name) {
case 'whirl':
case 'fisheye':
case 'pixelate': // absolute value
case 'mosaic': // absolute value
min = -Infinity;
max = Infinity;
break;
case 'color':
value = value % 200;
if (value < 0) value += 200;
max = 200;
break;
}
if (value < min) value = min;
if (value > max) value = max;
this.filters[name] = value;
this.updateFilters();
};
Base.prototype.resetFilters = function() {
this.filters = {
color: 0,
fisheye: 0,
whirl: 0,
pixelate: 0,
mosaic: 0,
brightness: 0,
ghost: 0
};
};
Base.prototype.ask = function(question) {
var stage = this.stage;
if (question) {
if (this.isSprite && this.visible) {
stage.promptTitle.style.display = 'none';
} else {
stage.promptTitle.style.display = 'block';
stage.promptTitle.textContent = question;
}
} else {
stage.promptTitle.style.display = 'none';
}
stage.hidePrompt = false;
stage.prompter.style.display = 'block';
stage.prompt.value = '';
stage.prompt.focus();
};
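  // Stage: owns the drawing canvas, the pen layer, keyboard and mouse input and
  // the ask prompt, and keeps every sprite and watcher in this.children.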
var Stage = function() {
this.stage = this;
Stage.parent.call(this);
this.children = [];
this.defaultWatcherX = 10;
this.defaultWatcherY = 10;
this.info = {};
this.answer = '';
this.promptId = 0;
this.nextPromptId = 0;
this.tempoBPM = 60;
this.videoAlpha = 1;
this.zoom = 1;
this.maxZoom = 1;
this.baseNow = 0;
this.baseTime = 0;
this.timerStart = 0;
this.cloneCount = 0;
this.keys = {};
this.rawMouseX = 0;
this.rawMouseY = 0;
this.mouseX = 0;
this.mouseY = 0;
this.mousePressed = false;
this.penCanvas = document.createElement('canvas');
this.penCanvas.width = 480;
this.penCanvas.height = 360;
this.penContext = this.penCanvas.getContext('2d');
this.root = document.createElement('div');
this.root.style.position = 'absolute';
this.root.style.width = '480px';
this.root.style.height = '360px';
this.root.style.fontSize = '1px';
this.root.style.WebkitUserSelect =
this.root.style.MozUserSelect =
this.root.style.MSUserSelect =
    this.root.style.userSelect = 'none';
this.canvas = document.createElement('canvas');
this.root.appendChild(this.canvas);
this.canvas.width = 480;
this.canvas.height = 360;
this.context = this.canvas.getContext('2d');
this.canvas.tabIndex = 0;
this.canvas.style.outline = 'none';
this.canvas.style.position = 'absolute';
this.canvas.style.background = '#fff';
// hardware acceleration
this.canvas.style.WebkitTransform = 'translateZ(0)';
this.canvas.addEventListener('keydown', function(e) {
if (e.ctrlKey || e.altKey || e.metaKey) {
return;
}
this.keys[e.keyCode] = true;
this.trigger('whenKeyPressed', e.keyCode);
e.stopPropagation();
e.preventDefault();
}.bind(this));
this.canvas.addEventListener('keyup', function(e) {
this.keys[e.keyCode] = false;
e.stopPropagation();
e.preventDefault();
}.bind(this));
if (hasTouchEvents) {
document.addEventListener('touchstart', function(e) {
this.mousePressed = true;
for (var i = 0; i < e.changedTouches.length; i++) {
this.updateMouse(e.changedTouches[i]);
if (e.target === this.canvas) {
this.clickMouse();
}
}
if (e.target === this.canvas) e.preventDefault();
}.bind(this));
document.addEventListener('touchmove', function(e) {
this.updateMouse(e.changedTouches[0]);
}.bind(this));
document.addEventListener('touchend', function(e) {
this.releaseMouse();
}.bind(this));
} else {
document.addEventListener('mousedown', function(e) {
this.updateMouse(e);
this.mousePressed = true;
if (e.target === this.canvas) {
this.clickMouse();
e.preventDefault();
this.canvas.focus();
}
}.bind(this));
document.addEventListener('mousemove', function(e) {
this.updateMouse(e);
}.bind(this));
document.addEventListener('mouseup', function(e) {
this.updateMouse(e);
this.releaseMouse();
}.bind(this));
}
this.prompter = document.createElement('div');
this.root.appendChild(this.prompter);
this.prompter.style.position = 'absolute';
this.prompter.style.left =
this.prompter.style.right = '14em';
this.prompter.style.bottom = '6em';
this.prompter.style.padding = '5em 30em 5em 5em';
this.prompter.style.border = '3em solid rgb(46, 174, 223)';
this.prompter.style.borderRadius = '8em';
this.prompter.style.display = 'none';
this.promptTitle = document.createElement('div');
this.prompter.appendChild(this.promptTitle);
    this.promptTitle.textContent = 'What\'s your name?';
this.promptTitle.style.cursor = 'default';
this.promptTitle.style.font = 'bold 13em sans-serif';
this.promptTitle.style.margin = '0 '+(-25/13)+'em '+(5/13)+'em 0';
this.promptTitle.style.whiteSpace = 'pre';
this.promptTitle.style.overflow = 'hidden';
this.promptTitle.style.textOverflow = 'ellipsis';
this.prompt = document.createElement('input');
this.prompter.appendChild(this.prompt);
this.prompt.style.border = '0';
this.prompt.style.background = '#eee';
this.prompt.style.MozBoxSizing =
this.prompt.style.boxSizing = 'border-box';
this.prompt.style.font = '13em sans-serif';
this.prompt.style.padding = '0 '+(3/13)+'em';
this.prompt.style.outline = '0';
this.prompt.style.margin = '0';
this.prompt.style.width = '100%';
this.prompt.style.height = ''+(20/13)+'em';
this.prompt.style.display = 'block';
this.prompt.style.WebkitBorderRadius =
this.prompt.style.borderRadius = '0';
this.prompt.style.WebkitBoxShadow =
this.prompt.style.boxShadow = 'inset '+(1/13)+'em '+(1/13)+'em '+(2/13)+'em rgba(0, 0, 0, .2), inset '+(-1/13)+'em '+(-1/13)+'em '+(1/13)+'em rgba(255, 255, 255, .2)';
this.prompt.style.WebkitAppearance = 'none';
this.promptButton = document.createElement('div');
this.prompter.appendChild(this.promptButton);
this.promptButton.style.width = '22em';
this.promptButton.style.height = '22em';
this.promptButton.style.position = 'absolute';
this.promptButton.style.right = '4em';
this.promptButton.style.bottom = '4em';
this.promptButton.style.background = 'url(icons.svg) -165em -37em';
this.promptButton.style.backgroundSize = '192em 64em';
this.prompt.addEventListener('keydown', function(e) {
if (e.keyCode === 13) {
this.submitPrompt();
}
}.bind(this));
this.promptButton.addEventListener(hasTouchEvents ? 'touchstart' : 'mousedown', this.submitPrompt.bind(this));
};
inherits(Stage, Base);
Stage.prototype.isStage = true;
Stage.prototype.fromJSON = function(data) {
Stage.parent.prototype.fromJSON.call(this, data);
data.children.forEach(function(d) {
if (d.listName) return;
this.children.push(new (d.cmd ? Watcher : Sprite)(this).fromJSON(d));
}, this);
this.children.forEach(function(child) {
if (child.resolve) child.resolve();
}, this);
P.compile(this);
return this;
};
Stage.prototype.focus = function() {
if (this.promptId < this.nextPromptId) {
this.prompt.focus();
} else {
this.canvas.focus();
}
};
Stage.prototype.updateMouse = function(e) {
var bb = this.canvas.getBoundingClientRect();
var x = (e.clientX - bb.left) / this.zoom - 240;
var y = 180 - (e.clientY - bb.top) / this.zoom;
this.rawMouseX = x;
this.rawMouseY = y;
if (x < -240) x = -240;
if (x > 240) x = 240;
if (y < -180) y = -180;
if (y > 180) y = 180;
this.mouseX = x;
this.mouseY = y;
};
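  // Changes the display zoom. The pen canvas is only ever re-created at a larger
  // resolution (maxZoom), copying the old trails so they are not lost.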
Stage.prototype.setZoom = function(zoom) {
if (this.zoom === zoom) return;
if (this.maxZoom < zoom) {
this.maxZoom = zoom;
var canvas = this.penCanvas;
this.penCanvas = document.createElement('canvas');
this.penCanvas.width = 480 * zoom;
this.penCanvas.height = 360 * zoom;
this.penContext = this.penCanvas.getContext('2d');
this.penContext.drawImage(canvas, 0, 0, 480 * zoom, 360 * zoom);
this.penContext.scale(this.maxZoom, this.maxZoom);
}
this.root.style.width =
this.canvas.style.width = 480 * zoom + 'px';
this.root.style.height =
this.canvas.style.height = 360 * zoom + 'px';
this.root.style.fontSize = zoom + 'px';
this.zoom = zoom;
};
Stage.prototype.clickMouse = function() {
this.mouseSprite = undefined;
for (var i = this.children.length; i--;) {
if (this.children[i].isSprite && this.children[i].visible && this.children[i].touching('_mouse_')) {
if (this.children[i].isDraggable) {
this.mouseSprite = this.children[i];
this.children[i].mouseDown();
} else {
this.triggerFor(this.children[i], 'whenClicked');
}
return;
}
}
this.triggerFor(this, 'whenClicked');
};
Stage.prototype.releaseMouse = function() {
this.mousePressed = false;
if (this.mouseSprite) {
this.mouseSprite.mouseUp();
this.mouseSprite = undefined;
}
};
Stage.prototype.resetAllFilters = function() {
var children = this.children;
var i = children.length;
while (i--) {
children[i].resetFilters();
}
this.resetFilters();
};
Stage.prototype.removeAllClones = function() {
var i = this.children.length;
while (i--) {
if (this.children[i].isClone) {
this.children.splice(i, 1);
}
}
this.cloneCount = 0;
};
Stage.prototype.getObject = function(name) {
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].objName === name) {
return this.children[i];
}
}
if (name === '_stage_' || name === this.objName) {
return this;
}
};
Stage.prototype.draw = function() {
var context = this.context;
this.canvas.width = 480 * this.zoom; // clear
this.canvas.height = 360 * this.zoom;
context.save();
context.scale(this.zoom, this.zoom);
this.drawOn(context);
context.restore();
if (this.hidePrompt) {
this.hidePrompt = false;
this.prompter.style.display = 'none';
this.canvas.focus();
}
};
Stage.prototype.drawOn = function(context, except) {
var costume = this.costumes[this.currentCostumeIndex];
context.save();
context.scale(costume.scale, costume.scale);
context.globalAlpha = Math.max(0, Math.min(1, 1 - this.filters.ghost / 100));
context.drawImage(costume.image, 0, 0);
context.restore();
context.save();
context.scale(1 / this.maxZoom, 1 / this.maxZoom);
context.drawImage(this.penCanvas, 0, 0);
context.restore();
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].visible && this.children[i] !== except) {
this.children[i].draw(context);
}
}
};
Stage.prototype.moveTo = function() {};
Stage.prototype.submitPrompt = function() {
if (this.promptId < this.nextPromptId) {
this.answer = this.prompt.value;
this.promptId += 1;
if (this.promptId >= this.nextPromptId) {
this.hidePrompt = true;
}
}
};
var KEY_CODES = {
'space': 32,
'left arrow': 37,
'up arrow': 38,
'right arrow': 39,
'down arrow': 40
};
var getKeyCode = function(keyName) {
return KEY_CODES[keyName.toLowerCase()] || keyName.toUpperCase().charCodeAt(0);
};
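  // Sprite: a movable object with position, direction, scale, pen state and drag
  // support; clones share costumes and compiled scripts with their original.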
var Sprite = function(stage) {
this.stage = stage;
Sprite.parent.call(this);
this.addVariables(stage.variables);
this.addLists(stage.lists);
this.direction = 90;
this.indexInLibrary = -1;
this.isDraggable = false;
this.isDragging = false;
this.rotationStyle = 'normal';
this.scale = 1;
this.scratchX = 0;
this.scratchY = 0;
this.spriteInfo = {};
this.visible = true;
this.penHue = 240;
this.penSaturation = 100;
this.penLightness = 50;
this.penSize = 1;
this.isPenDown = false;
this.isSprite = true;
};
inherits(Sprite, Base);
Sprite.prototype.fromJSON = function(data) {
Sprite.parent.prototype.fromJSON.call(this, data);
this.direction = data.direction;
this.indexInLibrary = data.indexInLibrary;
this.isDraggable = data.isDraggable;
this.rotationStyle = data.rotationStyle;
this.scale = data.scale;
this.scratchX = data.scratchX;
this.scratchY = data.scratchY;
this.spriteInfo = data.spriteInfo;
this.visible = data.visible;
return this;
};
Sprite.prototype.clone = function() {
var c = new Sprite(this.stage);
c.isClone = true;
c.costumes = this.costumes;
c.currentCostumeIndex = this.currentCostumeIndex;
c.objName = this.objName;
c.sounds = this.sounds;
c.variables = [];
c.lists = [];
for (var i = 0; i < this.variables.length; i++) {
var v = this.variables[i];
c.varRefs[v.name] = c.variables[i] = {
name: v.name,
value: v.value
};
}
for (var i = 0; i < this.lists.length; i++) {
var l = this.lists[i];
c.listRefs[l.listName] = c.lists[i] = {
contents: l.contents,
listName: l.listName
};
}
c.procedures = this.procedures;
c.listeners = this.listeners;
c.fns = this.fns;
c.scripts = this.scripts;
    c.filters = {
color: this.filters.color,
fisheye: this.filters.fisheye,
whirl: this.filters.whirl,
pixelate: this.filters.pixelate,
mosaic: this.filters.mosaic,
brightness: this.filters.brightness,
ghost: this.filters.ghost
};
c.direction = this.direction;
c.indexInLibrary = this.indexInLibrary;
c.isDraggable = this.isDraggable;
c.rotationStyle = this.rotationStyle;
c.scale = this.scale;
c.scratchX = this.scratchX;
c.scratchY = this.scratchY;
c.visible = this.visible;
c.penHue = this.penHue;
c.penSaturation = this.penSaturation;
c.penLightness = this.penLightness;
c.penSize = this.penSize;
c.isPenDown = this.isPenDown;
c.initRuntime();
return c;
};
Sprite.prototype.mouseDown = function() {
this.dragStartX = this.scratchX;
this.dragStartY = this.scratchY;
this.dragOffsetX = this.scratchX - this.stage.mouseX;
this.dragOffsetY = this.scratchY - this.stage.mouseY;
this.isDragging = true;
};
Sprite.prototype.mouseUp = function() {
if (this.isDragging && this.scratchX === this.dragStartX && this.scratchY === this.dragStartY) {
this.stage.triggerFor(this, 'whenClicked');
}
this.isDragging = false;
};
Sprite.prototype.forward = function(steps) {
var d = (90 - this.direction) * Math.PI / 180;
this.moveTo(this.scratchX + steps * Math.cos(d), this.scratchY + steps * Math.sin(d));
};
Sprite.prototype.moveTo = function(x, y) {
var ox = this.scratchX;
var oy = this.scratchY;
if (ox === x && oy === y && !this.isPenDown) return;
this.scratchX = x;
this.scratchY = y;
this.keepOnStage();
if (this.isPenDown) {
var context = this.stage.penContext;
if (this.penSize % 2 > .5 && this.penSize % 2 < 1.5) {
ox -= .5;
oy -= .5;
x -= .5;
y -= .5;
}
context.strokeStyle = 'hsl(' + this.penHue + ',' + this.penSaturation + '%,' + (this.penLightness > 100 ? 200 - this.penLightness : this.penLightness) + '%)';
context.lineWidth = this.penSize;
context.lineCap = 'round';
context.beginPath();
context.moveTo(240 + ox, 180 - oy);
context.lineTo(240 + x, 180 - y);
context.stroke();
}
};
Sprite.prototype.dotPen = function() {
var context = this.stage.penContext;
var x = this.scratchX;
var y = this.scratchY;
if (this.penSize % 2 > .5 && this.penSize % 2 < 1.5) {
x -= .5;
y -= .5;
}
context.strokeStyle = 'hsl(' + this.penHue + ',' + this.penSaturation + '%,' + (this.penLightness > 100 ? 200 - this.penLightness : this.penLightness) + '%)';
context.lineWidth = this.penSize;
context.lineCap = 'round';
context.beginPath();
context.moveTo(240 + x, 180 - y);
context.lineTo(240.01 + x, 180 - y);
context.stroke();
};
Sprite.prototype.stamp = function() {
var context = this.stage.penContext;
this.draw(context);
};
Sprite.prototype.draw = function(context) {
var costume = this.costumes[this.currentCostumeIndex];
if (this.isDragging) {
this.moveTo(this.dragOffsetX + this.stage.mouseX, this.dragOffsetY + this.stage.mouseY);
}
if (costume) {
context.save();
context.translate(this.scratchX + 240, 180 - this.scratchY);
if (this.rotationStyle === 'normal') {
context.rotate((this.direction - 90) * Math.PI / 180);
} else if (this.rotationStyle === 'leftRight' && this.direction < 0) {
context.scale(-1, 1);
}
context.scale(this.scale, this.scale);
context.scale(costume.scale, costume.scale);
context.translate(-costume.rotationCenterX, -costume.rotationCenterY);
context.globalAlpha = Math.max(0, Math.min(1, 1 - this.filters.ghost / 100));
context.drawImage(costume.image, 0, 0);
context.restore();
}
};
Sprite.prototype.keepOnStage = function() {
// TODO
};
Sprite.prototype.setDirection = function(degrees) {
var d = degrees % 360;
if (d > 180) d -= 360;
if (d <= -180) d += 360;
this.direction = d;
};
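  // Shared off-screen canvas for pixel-accurate collision tests: touching() does
  // a cheap bounding-box check first, then composites both sprites and scans alpha.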
var collisionCanvas = document.createElement('canvas');
var collisionContext = collisionCanvas.getContext('2d');
Sprite.prototype.touching = function(thing) {
var costume = this.costumes[this.currentCostumeIndex];
if (thing === '_mouse_') {
var bounds = this.rotatedBounds();
var x = this.stage.rawMouseX;
var y = this.stage.rawMouseY;
if (x < bounds.left || y < bounds.bottom || x > bounds.right || y > bounds.top) {
return false;
}
var d = costume.context.getImageData((x - this.scratchX) * costume.bitmapResolution + costume.rotationCenterX, (this.scratchY - y) * costume.bitmapResolution + costume.rotationCenterY, 1, 1).data;
return d[3] !== 0;
} else if (thing === '_edge_') {
var bounds = this.rotatedBounds();
return bounds.left <= -240 || bounds.right >= 240 || bounds.top >= 180 || bounds.bottom <= -180;
} else {
if (!this.visible) return false;
var sprite = this.stage.getObject(thing);
if (!sprite || !sprite.visible) return false;
var sc = sprite.costumes[sprite.currentCostumeIndex];
var mb = this.rotatedBounds();
var ob = sprite.rotatedBounds();
if (mb.bottom >= ob.top || ob.bottom >= mb.top || mb.left >= ob.right || ob.left >= mb.right) {
return false;
}
var left = Math.max(mb.left, ob.left);
var top = Math.min(mb.top, ob.top);
var right = Math.min(mb.right, ob.right);
var bottom = Math.max(mb.bottom, ob.bottom);
collisionCanvas.width = right - left;
collisionCanvas.height = top - bottom;
collisionContext.save();
collisionContext.translate(-(left + 240), -(180 - top));
this.draw(collisionContext);
collisionContext.globalCompositeOperation = 'source-in';
sprite.draw(collisionContext);
collisionContext.restore();
var data = collisionContext.getImageData(0, 0, right - left, top - bottom).data;
var length = (right - left) * (top - bottom) * 4;
for (var i = 0; i < length; i += 4) {
if (data[i + 3]) {
return true;
}
}
return false;
}
};
Sprite.prototype.touchingColor = function(rgb) {
var b = this.rotatedBounds();
collisionCanvas.width = b.right - b.left;
collisionCanvas.height = b.top - b.bottom;
collisionContext.save();
collisionContext.translate(-(240 + b.left), -(180 - b.top));
this.stage.drawOn(collisionContext, this);
collisionContext.globalCompositeOperation = 'destination-in';
this.draw(collisionContext);
collisionContext.restore();
var data = collisionContext.getImageData(0, 0, b.right - b.left, b.top - b.bottom).data;
rgb = rgb & 0xffffff;
var length = (b.right - b.left) * (b.top - b.bottom) * 4;
for (var i = 0; i < length; i += 4) {
if ((data[i] << 16 | data[i + 1] << 8 | data[i + 2]) === rgb) {
return true;
}
}
return false;
};
Sprite.prototype.bounceOffEdge = function() {
var b = this.rotatedBounds();
var dl = 240 + b.left;
var dt = 180 - b.top;
var dr = 240 - b.right;
var db = 180 + b.bottom;
var d = Math.min(dl, dt, dr, db);
if (d > 0) return;
var dir = this.direction * Math.PI / 180;
var dx = Math.sin(dir);
var dy = -Math.cos(dir);
switch (d) {
case dl: dx = Math.max(0.2, Math.abs(dx)); break;
case dt: dy = Math.max(0.2, Math.abs(dy)); break;
case dr: dx = -Math.max(0.2, Math.abs(dx)); break;
case db: dy = -Math.max(0.2, Math.abs(dy)); break;
}
this.direction = Math.atan2(dy, dx) * 180 / Math.PI + 90;
b = this.rotatedBounds();
var x = this.scratchX;
var y = this.scratchY;
if (b.left < -240) x += -240 - b.left;
if (b.top > 180) y += 180 - b.top;
    if (b.right > 240) x += 240 - b.right;
    if (b.bottom < -180) y += -180 - b.bottom;
    this.moveTo(x, y);
  };
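  // Axis-aligned bounding box of the rotated, scaled costume in Scratch
  // coordinates (x to the right, y up, origin at the centre of the stage).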
Sprite.prototype.rotatedBounds = function() {
var costume = this.costumes[this.currentCostumeIndex];
var mSin = Math.sin(this.direction * Math.PI / 180);
var mCos = Math.cos(this.direction * Math.PI / 180);
var left = -costume.rotationCenterX * costume.scale * this.scale;
var top = costume.rotationCenterY * costume.scale * this.scale;
var right = left + costume.image.width * costume.scale * this.scale;
var bottom = top - costume.image.height * costume.scale * this.scale;
var tlX = mSin * left - mCos * top;
var tlY = mCos * left + mSin * top;
var trX = mSin * right - mCos * top;
var trY = mCos * right + mSin * top;
var blX = mSin * left - mCos * bottom;
var blY = mCos * left + mSin * bottom;
var brX = mSin * right - mCos * bottom;
var brY = mCos * right + mSin * bottom;
return {
left: this.scratchX + Math.min(tlX, trX, blX, brX),
right: this.scratchX + Math.max(tlX, trX, blX, brX),
top: this.scratchY + Math.max(tlY, trY, blY, brY),
bottom: this.scratchY + Math.min(tlY, trY, blY, brY)
};
};
Sprite.prototype.showRotatedBounds = function() {
var bounds = this.rotatedBounds();
var div = document.createElement('div');
div.style.outline = '1px solid red';
div.style.position = 'absolute';
div.style.left = (240 + bounds.left) + 'px';
div.style.top = (180 - bounds.top) + 'px';
div.style.width = (bounds.right - bounds.left) + 'px';
div.style.height = (bounds.top - bounds.bottom) + 'px';
this.stage.canvas.parentNode.appendChild(div);
};
Sprite.prototype.distanceTo = function(thing) {
if (thing === '_mouse_') {
var x = this.stage.mouseX;
var y = this.stage.mouseY;
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
x = sprite.scratchX;
y = sprite.scratchY;
}
return Math.sqrt((this.scratchX - x) * (this.scratchX - x) + (this.scratchY - y) * (this.scratchY - y));
};
Sprite.prototype.gotoObject = function(thing) {
if (thing === '_mouse_') {
this.moveTo(this.stage.mouseX, this.stage.mouseY);
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
this.moveTo(sprite.scratchX, sprite.scratchY);
}
};
Sprite.prototype.pointTowards = function(thing) {
if (thing === '_mouse_') {
var x = this.stage.mouseX;
var y = this.stage.mouseY;
} else {
var sprite = this.stage.getObject(thing);
if (!sprite) return 0;
x = sprite.scratchX;
y = sprite.scratchY;
}
this.direction = Math.atan2(x - this.scratchX, y - this.scratchY) * 180 / Math.PI;
};
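  // Costume: composites the base layer and optional text layer onto a single
  // canvas, re-rendering once the underlying images finish loading.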
var Costume = function(data) {
this.baseLayerID = data.baseLayerID;
this.baseLayerMD5 = data.baseLayerMD5;
this.baseLayer = data.$image;
this.bitmapResolution = data.bitmapResolution || 1;
this.scale = 1 / this.bitmapResolution;
this.costumeName = data.costumeName;
this.rotationCenterX = data.rotationCenterX;
this.rotationCenterY = data.rotationCenterY;
this.textLayer = data.$text;
this.image = document.createElement('canvas');
this.context = this.image.getContext('2d');
this.render();
this.baseLayer.onload = function() {
this.render();
}.bind(this);
if (this.textLayer) {
this.textLayer.onload = this.baseLayer.onload;
}
};
addEvents(Costume, 'load');
Costume.prototype.render = function() {
if (!this.baseLayer.width || this.textLayer && !this.textLayer.width) {
return;
}
this.image.width = this.baseLayer.width;
this.image.height = this.baseLayer.height;
this.context.drawImage(this.baseLayer, 0, 0);
if (this.textLayer) {
this.context.drawImage(this.textLayer, 0, 0);
}
};
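  // Watcher: an on-stage readout (and optional slider) for a variable or sensor
  // value, drawn straight onto the stage canvas every frame.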
var Watcher = function(stage) {
this.stage = stage;
this.cmd = 'getVar:';
this.color = '#ee7d16';
this.isDiscrete = true;
this.label = 'watcher';
this.mode = 1;
this.param = 'var';
this.sliderMax = 100;
this.sliderMin = 0;
this.target = undefined;
this.visible = true;
this.x = 0;
this.y = 0;
};
Watcher.prototype.fromJSON = function(data) {
this.cmd = data.cmd || 'getVar:';
if (data.color) {
var c = (data.color < 0 ? data.color + 0x1000000 : data.color).toString(16);
this.color = '#000000'.slice(0, -c.length) + c;
}
this.isDiscrete = data.isDiscrete == null ? true : data.isDiscrete;
this.label = data.label || '';
this.mode = data.mode || 1;
this.param = data.param;
this.sliderMax = data.sliderMax == null ? 100 : data.sliderMax;
this.sliderMin = data.sliderMin || 0;
this.targetName = data.target;
this.visible = data.visible == null ? true : data.visible;
this.x = data.x || 0;
this.y = data.y || 0;
return this;
};
Watcher.prototype.resolve = function() {
this.target = this.stage.getObject(this.targetName);
if (this.target && this.cmd === 'getVar:') {
var ref = this.target.varRefs[this.param];
if (ref) {
ref.watcher = this;
}
}
};
Watcher.prototype.draw = function(context) {
var value = 0;
if (!this.target) return;
switch (this.cmd) {
case 'answer':
value = this.stage.answer;
break;
case 'backgroundIndex':
value = this.stage.currentCostumeIndex + 1;
break;
case 'costumeIndex':
value = this.target.currentCostumeIndex + 1;
break;
case 'getVar:':
var ref = this.target.varRefs[this.param];
if (ref) {
if (this.mode === 3 && this.stage.mousePressed) {
var x = this.stage.mouseX + 240 - this.x - 5;
var y = 180 - this.stage.mouseY - this.y - 20;
if (x >= 0 && y >= 0 && x <= this.width - 5 - 5 && y <= 9) {
ref.value = this.sliderMin + Math.max(0, Math.min(1, (x - 2.5) / (this.width - 5 - 5 - 5))) * (this.sliderMax - this.sliderMin);
ref.value = this.isDiscrete ? Math.round(ref.value) : Math.round(ref.value * 100) / 100;
}
}
value = ref.value;
}
break;
case 'heading':
value = this.target.direction;
break;
case 'scale':
value = this.target.scale * 100;
break;
case 'sceneName':
ref = this.stage.costumes[this.stage.currentCostumeIndex];
if (ref) value = ref.costumeName;
break;
case 'senseVideoMotion':
// TODO
break;
case 'soundLevel':
// TODO
break;
case 'tempo':
value = this.stage.tempoBPM;
break;
case 'timeAndDate':
value = this.timeAndDate(this.param);
break;
case 'timer':
value = Math.round((this.stage.now() - this.stage.timerStart) / 100) / 10;
break;
case 'volume':
// TODO
break;
case 'xpos':
value = this.target.scratchX;
break;
case 'ypos':
value = this.target.scratchY;
break;
}
    if (typeof value === 'number' && (value < -0.001 || value > 0.001)) {
value = Math.round(value * 1000) / 1000;
}
value = String(value);
if (this.labelWidth == null) {
context.font = 'bold 11px sans-serif';
this.labelWidth = context.measureText(this.label).width;
}
context.save();
context.translate(this.x, this.y);
if (this.mode === 1 || this.mode === 3) {
context.font = 'bold 11px sans-serif';
var dw = Math.max(41, 5 + context.measureText(value).width + 5);
var r = 5;
var w = this.width = 5 + this.labelWidth + 5 + dw + 5;
var h = this.mode === 1 ? 21 : 32;
context.strokeStyle = 'rgb(148, 145, 145)';
context.fillStyle = 'rgb(193, 196, 199)';
context.lineWidth = 2;
context.beginPath();
context.arc(r + 1, r + 1, r, Math.PI, Math.PI * 3/2, false);
context.arc(w - r - 1, r + 1, r, Math.PI * 3/2, 0, false);
context.arc(w - r - 1, h - r - 1, r, 0, Math.PI/2, false);
context.arc(r + 1, h - r - 1, r, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#000';
context.fillText(this.label, 5, 14);
var dh = 15;
var dx = 5 + this.labelWidth + 5;
var dy = 3;
var dr = 4;
context.save();
context.translate(dx, dy);
context.strokeStyle = '#fff';
context.fillStyle = this.color;
context.lineWidth = 2;
context.beginPath();
context.arc(dr + 1, dr + 1, dr, Math.PI, Math.PI * 3/2, false);
context.arc(dw - dr - 1, dr + 1, dr, Math.PI * 3/2, 0, false);
context.arc(dw - dr - 1, dh - dr - 1, dr, 0, Math.PI/2, false);
context.arc(dr + 1, dh - dr - 1, dr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#fff';
context.textAlign = 'center';
context.fillText(value, dw / 2, dh - 4);
context.restore();
if (this.mode === 3) {
var sh = 5;
var sw = w - 5 - 5;
var sr = 1.5;
var br = 4.5;
context.save();
context.translate(5, 22);
context.strokeStyle = 'rgb(148, 145, 145)';
context.fillStyle = 'rgb(213, 216, 219)';
context.lineWidth = 2;
context.beginPath();
context.arc(sr + 1, sr + 1, sr, Math.PI, Math.PI * 3/2, false);
context.arc(sw - sr - 1, sr + 1, sr, Math.PI * 3/2, 0, false);
context.arc(sw - sr - 1, sh - sr - 1, sr, 0, Math.PI/2, false);
context.arc(sr + 1, sh - sr - 1, sr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
var x = (sw - sh) * ((Number(value) || 0) - this.sliderMin) / (this.sliderMax - this.sliderMin);
context.strokeStyle = 'rgb(108, 105, 105)';
context.fillStyle = 'rgb(233, 236, 239)';
context.beginPath();
context.arc(x + sh / 2, sh / 2, br - 1, 0, Math.PI * 2, false);
context.stroke();
context.fill();
context.restore();
}
} else if (this.mode === 2) {
context.font = 'bold 15px sans-serif';
dh = 21;
dw = Math.max(41, 5 + context.measureText(value).width + 5);
dr = 4;
context.strokeStyle = '#fff';
context.fillStyle = this.color;
context.lineWidth = 2;
context.beginPath();
context.arc(dr + 1, dr + 1, dr, Math.PI, Math.PI * 3/2, false);
context.arc(dw - dr - 1, dr + 1, dr, Math.PI * 3/2, 0, false);
context.arc(dw - dr - 1, dh - dr - 1, dr, 0, Math.PI/2, false);
context.arc(dr + 1, dh - dr - 1, dr, Math.PI/2, Math.PI, false);
context.closePath();
context.stroke();
context.fill();
context.fillStyle = '#fff';
context.textAlign = 'center';
context.fillText(value, dw / 2, dh - 5);
}
context.restore();
};
return {
hasTouchEvents: hasTouchEvents,
getKeyCode: getKeyCode,
IO: IO,
Base: Base,
Stage: Stage,
Sprite: Sprite,
Watcher: Watcher
};
}());
P.compile = (function() {
'use strict';
var LOG_PRIMITIVES;
// LOG_PRIMITIVES = true;
var EVENT_SELECTORS = [
'procDef',
'whenClicked',
'whenCloned',
'whenGreenFlag',
'whenIReceive',
'whenKeyPressed',
'whenSceneStarts',
'whenSensorGreaterThan' // TODO
];
var compileScripts = function(object) {
for (var i = 0; i < object.scripts.length; i++) {
compileListener(object, object.scripts[i][2]);
}
};
var warnings;
var warn = function(message) {
warnings[message] = (warnings[message] || 0) + 1;
};
var name = 'a';
function varn() {
var i, s;
s = '';
i = name.length - 1;
while (i >= 0 && name[i] === 'z') {
s = 'a' + s;
--i;
}
if (i === -1) {
s = 'a' + s;
} else {
s = String.fromCharCode(name.charCodeAt(i) + 1) + s;
}
s = name.substr(0, i) + s;
name = s;
return '$tmp_' + s;
}
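  // Compiles one script (hat block plus body) into a chain of continuation
  // functions. Blocking blocks (waits, loops, broadcast-and-wait) end the current
  // continuation with queue()/return so the scheduler can interleave threads.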
var compileListener = function(object, script) {
if (!script[0] || EVENT_SELECTORS.indexOf(script[0][0]) === -1) return;
var nextLabel = function() {
return object.fns.length + fns.length;
};
var label = function() {
var id = nextLabel();
fns.push(source.length);
return id;
};
var delay = function() {
source += 'return;\n';
label();
};
var queue = function(id) {
source += 'queue(' + id + ');\n';
source += 'return;\n';
};
var seq = function(script) {
if (!script) return;
for (var i = 0; i < script.length; i++) {
compile(script[i]);
}
};
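    // val/bool/num return JavaScript source text for an expression; in the
    // generated code S is the running sprite, self the stage, and C the current
    // procedure call frame.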
var val = function(e) {
if (typeof e === 'number' || typeof e === 'boolean') {
return '' + e;
} else if (typeof e === 'string') {
return '"' + e
.replace(/\\/g, '\\\\')
.replace(/\n/g, '\\n')
.replace(/\r/g, '\\r')
.replace(/"/g, '\\"') + '"';
} else if (e[0] === 'xpos') { /* Motion */
return 'S.scratchX';
} else if (e[0] === 'ypos') {
return 'S.scratchY';
} else if (e[0] === 'heading') {
return 'S.direction';
} else if (e[0] === 'costumeIndex') { /* Looks */
return '(S.currentCostumeIndex + 1)';
} else if (e[0] === 'costumeName') {
return 'S.getCostumeName()';
} else if (e[0] === 'backgroundIndex') {
return '(self.currentCostumeIndex + 1)';
} else if (e[0] === 'sceneName') {
return 'self.getCostumeName()';
} else if (e[0] === 'scale') {
return 'S.scale';
// } else if (e[0] === 'volume') { /* Sound */
} else if (e[0] === 'tempo') {
return 'self.tempoBPM';
} else if (e[0] === 'getParam') { /* Data */
return '(C && C.args[' + val(e[1]) + '] != null ? C.args[' + val(e[1]) + '] : 0)';
} else if (e[0] === 'readVariable') {
return 'S.varRefs[' + val(e[1]) + '].value';
} else if (e[0] === 'contentsOfList:') {
return 'contentsOfList(' + val(e[1]) + ')';
} else if (e[0] === 'getLine:ofList:') {
return 'getLineOfList(' + val(e[2]) + ', ' + val(e[1]) + ')';
} else if (e[0] === 'lineCountOfList:') {
return 'lineCountOfList(' + val(e[1]) + ')';
} else if (e[0] === 'list:contains:') {
return 'listContains(' + val(e[1]) + ', ' + val(e[2]) + ')';
} else if (e[0] === '+') { /* Operators */
return '(' + num(e[1]) + ' + ' + num(e[2]) + ')';
} else if (e[0] === '-') {
return '(' + num(e[1]) + ' - ' + num(e[2]) + ')';
} else if (e[0] === '*') {
return '(' + num(e[1]) + ' * ' + num(e[2]) + ')';
} else if (e[0] === '/') {
return '(' + num(e[1]) + ' / ' + num(e[2]) + ')';
} else if (e[0] === 'randomFrom:to:') {
return 'random(' + num(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === '<') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === -1)';
} else if (e[0] === '=') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === 0)';
} else if (e[0] === '>') {
return '(compare(' + val(e[1]) + ', ' + val(e[2]) + ') === 1)';
} else if (e[0] === '&') {
return '(' + bool(e[1]) + ' && ' + bool(e[2]) + ')';
} else if (e[0] === '|') {
return '(' + bool(e[1]) + ' || ' + bool(e[2]) + ')';
} else if (e[0] === 'not') {
return '!' + bool(e[1]) + '';
} else if (e[0] === 'abs') {
return 'Math.abs(' + num(e[1]) + ')';
} else if (e[0] === 'sqrt') {
return 'Math.sqrt(' + num(e[1]) + ')';
} else if (e[0] === 'concatenate:with:') {
return '("" + ' + val(e[1]) + ' + ' + val(e[2]) + ')';
} else if (e[0] === 'letter:of:') {
        return '(("" + ' + val(e[2]) + ')[Math.floor(' + num(e[1]) + ') - 1] || "")';
} else if (e[0] === 'stringLength:') {
return '("" + ' + val(e[1]) + ').length';
} else if (e[0] === '%' || e[0] === '\\') {
return 'mod(' + num(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === 'rounded') {
return 'Math.round(' + num(e[1]) + ')';
} else if (e[0] === 'computeFunction:of:') {
return 'mathFunc(' + val(e[1]) + ', ' + num(e[2]) + ')';
} else if (e[0] === 'mousePressed') {
return 'self.mousePressed';
} else if (e[0] === 'mouseX') {
return 'self.mouseX';
} else if (e[0] === 'mouseY') {
return 'self.mouseY';
} else if (e[0] === 'touching:') { /* Sensing */
return 'S.touching(' + val(e[1]) + ')';
} else if (e[0] === 'touchingColor:') {
return 'S.touchingColor(' + val(e[1]) + ')';
// } else if (e[0] === 'color:sees:') {
} else if (e[0] === 'answer') {
return 'self.answer';
} else if (e[0] === 'timer') {
return '(self.now() - self.timerStart) / 1000';
} else if (e[0] === 'keyPressed:') {
return '!!self.keys[P.getKeyCode(' + val(e[1]) + ')]';
} else if (e[0] === 'distanceTo:') {
return 'S.distanceTo(' + val(e[1]) + ')';
} else if (e[0] === 'getAttribute:of:') {
return 'attribute(' + val(e[1]) + ', ' + val(e[2]) + ')';
// } else if (e[0] === 'getUserId') {
// } else if (e[0] === 'getUserName') {
// } else if (e[0] === 'soundLevel') {
// } else if (e[0] === 'isLoud') {
} else if (e[0] === 'timestamp') {
return '((Date.now() - epoch) / 86400000)';
} else if (e[0] === 'timeAndDate') {
return 'timeAndDate(' + val(e[1]) + ')';
// } else if (e[0] === 'sensor:') {
// } else if (e[0] === 'sensorPressed:') {
} else {
warn('Undefined val: ' + e[0]);
}
};
var bool = function(e) {
return 'bool(' + val(e) + ')';
};
var num = function(e) {
if (typeof e === 'number') {
return e;
}
if (typeof e === 'boolean' || typeof e === 'string') {
return Number(e) || 0;
}
return '(Number(' + val(e) + ') || 0)';
};
var compile = function(block) {
if (LOG_PRIMITIVES) {
source += 'console.log(' + val(block[0]) + ');\n';
}
if (block[0] === 'forward:') { /* Motion */
source += 'S.forward(' + num(block[1]) + ');\n';
} else if (block[0] === 'turnRight:') {
source += 'S.setDirection(S.direction + ' + num(block[1]) + ');\n';
} else if (block[0] === 'turnLeft:') {
source += 'S.setDirection(S.direction - ' + num(block[1]) + ');\n';
} else if (block[0] === 'heading:') {
source += 'S.setDirection(' + num(block[1]) + ');\n';
} else if (block[0] === 'pointTowards:') {
source += 'S.pointTowards(' + val(block[1]) + ');\n';
} else if (block[0] === 'gotoX:y:') {
source += 'S.moveTo(' + num(block[1]) + ', ' + num(block[2]) + ');\n';
} else if (block[0] === 'gotoSpriteOrMouse:') {
source += 'S.gotoObject(' + val(block[1]) + ');\n';
} else if (block[0] === 'changeXposBy:') {
source += 'S.moveTo(S.scratchX + ' + num(block[1]) + ', S.scratchY);\n';
} else if (block[0] === 'xpos:') {
source += 'S.moveTo(' + num(block[1]) + ', S.scratchY);\n';
} else if (block[0] === 'changeYposBy:') {
source += 'S.moveTo(S.scratchX, S.scratchY + ' + num(block[1]) + ');\n';
} else if (block[0] === 'ypos:') {
source += 'S.moveTo(S.scratchX, ' + num(block[1]) + ');\n';
} else if (block[0] === 'bounceOffEdge') {
source += 'S.bounceOffEdge();\n';
} else if (block[0] === 'setRotationStyle') {
source += 'var style = ' + val(block[1]) + ';\n';
source += 'S.rotationStyle = style === "left-right" ? "leftRight" : style === "don\'t rotate" ? "none" : "normal";';
} else if (block[0] === 'lookLike:') { /* Looks */
source += 'S.setCostume(' + val(block[1]) + ');\n';
} else if (block[0] === 'nextCostume') {
source += 'S.currentCostumeIndex = (S.currentCostumeIndex + 1) % S.costumes.length;\n';
} else if (block[0] === 'showBackground:' ||
block[0] === 'startScene') {
source += 'self.setCostume(' + val(block[1]) + ');\n';
source += 'sceneChange();\n';
} else if (block[0] === 'nextBackground' ||
block[0] === 'nextScene') {
        source += 'self.currentCostumeIndex = (self.currentCostumeIndex + 1) % self.costumes.length;\n';
source += 'sceneChange();\n';
} else if (block[0] === 'startSceneAndWait') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
source += 'self.setCostume(' + val(block[1]) + ');\n';
source += 'R.threads = sceneChange();\n';
var id = label();
source += 'if (!running(R.threads)) {\n';
queue(id);
source += '}\n';
}
// } else if (block[0] === 'say:duration:elapsed:from:') {
} else if (block[0] === 'say:') {
source += 'console.log(' + val(block[1]) + ');\n';
// } else if (block[0] === 'think:duration:elapsed:from:') {
// } else if (block[0] === 'think:') {
} else if (block[0] === 'changeGraphicEffect:by:') {
source += 'S.filters[' + val(block[1]) + '] += ' + num(block[2]) + ';\n';
} else if (block[0] === 'setGraphicEffect:to:') {
source += 'S.filters[' + val(block[1]) + '] = ' + num(block[2]) + ';\n';
} else if (block[0] === 'filterReset') {
source += 'S.resetFilters();\n';
} else if (block[0] === 'changeSizeBy:') {
source += 'S.scale += ' + num(block[1]) + ' / 100;\n';
} else if (block[0] === 'setSizeTo:') {
source += 'S.scale = ' + num(block[1]) + ' / 100;\n';
} else if (block[0] === 'show') {
source += 'S.visible = true;\n';
} else if (block[0] === 'hide') {
source += 'S.visible = false;\n';
} else if (block[0] === 'comeToFront') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) self.children.splice(i, 1);\n';
source += 'self.children.push(S);\n';
} else if (block[0] === 'goBackByLayers:') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) {\n';
source += ' self.children.splice(i, 1);\n';
source += ' self.children.splice(Math.max(0, i - ' + num(block[1]) + '), 0, S);\n';
source += '}\n';
// } else if (block[0] === 'setVideoState') {
// } else if (block[0] === 'setVideoTransparency') {
// } else if (block[0] === 'playSound:') { /* Sound */
// } else if (block[0] === 'doPlaySoundAndWait') {
// } else if (block[0] === 'stopAllSounds') {
// } else if (block[0] === 'drum:duration:elapsed:from:') {
// } else if (block[0] === 'playDrum') {
// } else if (block[0] === 'rest:elapsed:from:') {
// } else if (block[0] === 'noteOn:duration:elapsed:from:') {
// } else if (block[0] === 'midiInstrument:') {
// } else if (block[0] === 'instrument:') {
// } else if (block[0] === 'changeVolumeBy:') {
// } else if (block[0] === 'setVolumeTo:') {
} else if (block[0] === 'changeTempoBy:') {
source += 'self.tempoBPM += ' + num(block[1]) + ';\n';
} else if (block[0] === 'setTempoTo:') {
source += 'self.tempoBPM = ' + num(block[1]) + ';\n';
} else if (block[0] === 'clearPenTrails') { /* Pen */
source += 'self.penCanvas.width = 480 * self.maxZoom;\n';
source += 'self.penContext.scale(self.maxZoom, self.maxZoom);\n';
} else if (block[0] === 'putPenDown') {
source += 'S.isPenDown = true;\n';
source += 'S.dotPen();\n';
} else if (block[0] === 'putPenUp') {
source += 'S.isPenDown = false;\n';
source += 'S.penState = null;\n';
} else if (block[0] === 'penColor:') {
source += 'var hsl = rgb2hsl(' + num(block[1]) + ');\n';
source += 'S.penHue = hsl[0];\n';
source += 'S.penSaturation = hsl[1];\n';
source += 'S.penLightness = hsl[2];\n';
} else if (block[0] === 'setPenHueTo:') {
source += 'S.penHue = ' + num(block[1]) + ' * 360 / 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'changePenHueBy:') {
source += 'S.penHue += ' + num(block[1]) + ' * 360 / 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'setPenShadeTo:') {
source += 'S.penLightness = ' + num(block[1]) + ' % 200;\n';
source += 'if (S.penLightness < 0) S.penLightness += 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'changePenShadeBy:') {
source += 'S.penLightness = (S.penLightness + ' + num(block[1]) + ') % 200;\n';
source += 'if (S.penLightness < 0) S.penLightness += 200;\n';
source += 'S.penSaturation = 100;\n';
} else if (block[0] === 'penSize:') {
source += 'S.penSize = ' + num(block[1]) + ';\n';
} else if (block[0] === 'changePenSizeBy:') {
source += 'S.penSize += ' + num(block[1]) + ';\n';
} else if (block[0] === 'stampCostume') {
source += 'S.draw(self.penContext);\n';
} else if (block[0] === 'setVar:to:') { /* Data */
source += 'if (S.varRefs[' + val(block[1]) + ']) S.varRefs[' + val(block[1]) + '].value = ' + val(block[2]) + ';\n';
} else if (block[0] === 'changeVar:by:') {
source += 'if (S.varRefs[' + val(block[1]) + ']) S.varRefs[' + val(block[1]) + '].value = (Number(S.varRefs[' + val(block[1]) + '].value) || 0) + ' + num(block[2]) + ';\n';
} else if (block[0] === 'append:toList:') {
source += 'appendToList(' + val(block[2]) + ', ' + val(block[1]) + ');\n';
} else if (block[0] === 'deleteLine:ofList:') {
source += 'deleteLineOfList(' + val(block[2]) + ', ' + val(block[1]) + ');\n';
} else if (block[0] === 'insert:at:ofList:') {
source += 'insertInList(' + val(block[3]) + ', ' + val(block[2]) + ', '+ val(block[1]) + ');\n';
} else if (block[0] === 'setLine:ofList:to:') {
source += 'setLineOfList(' + val(block[2]) + ', ' + val(block[1]) + ', '+ val(block[3]) + ');\n';
} else if (block[0] === 'showVariable:') {
source += 'showVariable(' + val(block[1]) + ', true);';
} else if (block[0] === 'hideVariable:') {
source += 'showVariable(' + val(block[1]) + ', false);';
// } else if (block[0] === 'showList:') {
// } else if (block[0] === 'hideList:') {
} else if (block[0] === 'broadcast:') { /* Control */
source += 'broadcast(' + val(block[1]) + ');';
} else if (block[0] === 'call') {
source += 'call(' + val(block[1]) + ', ' + (warp ? null : nextLabel()) + ', [';
for (var i = 2; i < block.length; i++) {
if (i > 2) {
source += ', ';
}
source += val(block[i]);
}
source += ']);\n';
if (!warp) delay();
} else if (block[0] === 'doBroadcastAndWait') {
source += 'R.threads = broadcast(' + val(block[1]) + ');\n';
var id = label();
source += 'if (running(R.threads)) {\n';
queue(id);
source += '}\n';
} else if (block[0] === 'doForever') {
var id = label();
seq(block[1]);
queue(id);
} else if (block[0] === 'doForeverIf') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
var id = label();
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
queue(id);
}
// } else if (block[0] === 'doForLoop') {
} else if (block[0] === 'doIf') {
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else if (block[0] === 'doIfElse') {
source += 'if (' + bool(block[1]) + ') {';
seq(block[2]);
source += '} else {';
seq(block[3]);
source += '}';
} else if (block[0] === 'doRepeat') {
source += 'save();\n';
source += 'R.count = ' + num(block[1]) + ';\n';
if (warp) {
source += 'while (R.count > 0) {\n';
source += ' R.count -= 1;\n';
seq(block[2]);
source += '}\n';
source += 'restore();\n';
} else {
var id = label();
source += 'if (R.count > 0) {\n';
source += ' R.count -= 1;\n';
seq(block[2]);
queue(id);
source += '} else {\n';
source += ' restore();\n';
source += '}\n';
}
} else if (block[0] === 'doReturn') {
source += 'endCall();\n';
source += 'return;\n';
} else if (block[0] === 'doUntil') {
if (warp) {
source += 'if (!' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else {
var id = label();
source += 'if (!' + bool(block[1]) + ') {\n';
seq(block[2]);
queue(id);
source += '}\n';
}
} else if (block[0] === 'doWhile') {
if (warp) {
source += 'while (' + bool(block[1]) + ') {\n';
seq(block[2]);
source += '}\n';
} else {
var id = label();
source += 'if (' + bool(block[1]) + ') {\n';
seq(block[2]);
queue(id);
source += '}\n';
}
} else if (block[0] === 'doWaitUntil') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
var id = label();
source += 'if (!' + bool(block[1]) + ') {\n';
queue(id);
source += '}\n';
}
} else if (block[0] === 'glideSecs:toX:y:elapsed:from:') {
if (warp) {
warn('Cannot be used at warp speed: ' + block);
} else {
source += 'save();\n';
source += 'R.start = self.now();\n';
source += 'R.duration = ' + num(block[1]) + ';\n';
source += 'R.baseX = S.scratchX;\n';
source += 'R.baseY = S.scratchY;\n';
source += 'R.deltaX = ' + num(block[2]) + ' - S.scratchX;\n';
source += 'R.deltaY = ' + num(block[3]) + ' - S.scratchY;\n';
var id = label();
source += 'var f = (self.now() - R.start) / (R.duration * 1000);\n';
source += 'if (f > 1) f = 1;\n';
source += 'S.moveTo(R.baseX + f * R.deltaX, R.baseY + f * R.deltaY);\n';
source += 'if (f < 1) {\n';
queue(id);
source += '}\n';
source += 'restore();\n';
}
} else if (block[0] === 'stopAll') {
source += 'self.stopAll();\n';
source += 'TERMINATE = true;\n';
source += 'return;\n';
} else if (block[0] === 'stopScripts') {
source += 'switch (' + val(block[1]) + ') {\n';
source += ' case "all":\n'
source += ' self.stopAll();\n';
source += ' TERMINATE = true;\n';
source += ' return;\n';
source += ' case "this script":\n';
source += ' endCall();\n';
source += ' return;\n';
source += ' case "other scripts in sprite":\n';
source += ' case "other scripts in stage":\n';
source += ' S.queue = [];\n';
source += ' TERMINATE = true;\n';
source += ' break;\n';
source += '}\n';
} else if (block[0] === 'wait:elapsed:from:') {
source += 'save();\n';
source += 'R.start = self.now();\n';
source += 'R.duration = ' + num(block[1]) + ';\n';
var id = label();
source += 'if (self.now() - R.start < R.duration * 1000) {\n';
queue(id);
source += '}\n';
source += 'restore();\n';
} else if (block[0] === 'warpSpeed') {
warp += 1;
seq(block[1]);
warp -= 1;
} else if (block[0] === 'createCloneOf') {
        source += 'clone(' + val(block[1]) + ');\n';
} else if (block[0] === 'deleteClone') {
source += 'var i = self.children.indexOf(S);\n';
source += 'if (i > -1) self.children.splice(i, 1);\n';
source += 'S.queue = [];\n';
source += 'TERMINATE = true;\n';
source += 'return;\n';
} else if (block[0] === 'doAsk') { /* Sensing */
source += 'R.id = self.nextPromptId++;\n';
var id = label();
source += 'if (self.promptId < R.id) {\n';
queue(id);
source += '}\n';
source += 'S.ask(' + val(block[1]) + ');';
var id = label();
source += 'if (self.promptId === R.id) {\n';
queue(id);
source += '}\n';
} else if (block[0] === 'timerReset') {
source += 'self.timerStart = self.now();\n';
} else {
warn('Undefined command: ' + block[0]);
}
};
var source = '';
var startfn = object.fns.length;
var fns = [0];
var warp = 0;
if (script[0][0] === 'procDef') {
warp += 1;
}
for (var i = 1; i < script.length; i++) {
compile(script[i]);
}
if (script[0][0] === 'procDef') {
source += 'endCall();\n';
source += 'return;\n';
}
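    // Turns the tail of the generated source into a standalone function,
    // dropping closing braces (and else branches) of blocks opened before the
    // slice point so the fragment still parses.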
var createContinuation = function(source) {
var result = '(function() {\n';
var brackets = 0;
var delBrackets = 0;
var shouldDelete = false;
for (var i = 0; i < source.length; i++) {
if (shouldDelete) {
if (source[i] === '{') {
delBrackets += 1;
} else if (source[i] === '}') {
delBrackets -= 1;
if (delBrackets === 0) {
shouldDelete = false;
}
}
} else {
if (source.substr(i, 8) === '} else {') {
if (brackets > 0) {
result += '} else {';
i += 7;
} else {
shouldDelete = true;
delBrackets = 0;
}
} else if (source[i] === '{') {
brackets += 1;
result += '{';
} else if (source[i] === '}') {
if (brackets > 0) {
result += '}';
brackets -= 1;
}
} else {
result += source[i];
}
}
}
result += '})';
try {
return P.runtime.scopedEval(result);
} catch (e) {
debugger;
}
};
for (var i = 0; i < fns.length; i++) {
object.fns.push(createContinuation(source.slice(fns[i])));
}
var f = object.fns[startfn];
if (script[0][0] === 'whenClicked') {
object.listeners.whenClicked.push(f);
} else if (script[0][0] === 'whenGreenFlag') {
object.listeners.whenGreenFlag.push(f);
} else if (script[0][0] === 'whenCloned') {
object.listeners.whenCloned.push(f);
} else if (script[0][0] === 'whenIReceive') {
var key = script[0][1].toLowerCase();
(object.listeners.whenIReceive[key] || (object.listeners.whenIReceive[key] = [])).push(f);
} else if (script[0][0] === 'whenKeyPressed') {
object.listeners.whenKeyPressed[P.getKeyCode(script[0][1])].push(f);
} else if (script[0][0] === 'whenSceneStarts') {
var key = script[0][1].toLowerCase();
(object.listeners.whenSceneStarts[key] || (object.listeners.whenSceneStarts[key] = [])).push(f);
} else if (script[0][0] === 'procDef') {
object.procedures[script[0][1]] = {
inputs: script[0][2],
fn: f
};
} else {
warn('Undefined event: ' + script[0][0]);
}
};
return function(stage) {
warnings = Object.create(null);
compileScripts(stage);
for (var i = 0; i < stage.children.length; i++) {
if (!stage.children[i].cmd) {
compileScripts(stage.children[i]);
}
}
for (var key in warnings) {
console.warn(key + (warnings[key] > 1 ? ' (repeated ' + warnings[key] + ' times)' : ''));
}
};
}());
P.runtime = (function() {
'use strict';
var self, S, R, STACK, C, CALLS, BASE, THREAD, TERMINATE, STOP_THREAD = {};
var bool = function(v) {
return Number(v) !== 0 && v !== '' && v !== 'false' && v !== false;
};
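  // Scratch comparison: numeric when both operands coerce to numbers, otherwise
  // falls back to string comparison.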
var compare = function(x, y) {
var nx = Number(x);
var ny = Number(y);
if (nx === nx && ny === ny) {
return nx < ny ? -1 : nx === ny ? 0 : 1;
}
var xs = String(x);
var ys = String(y);
return xs < ys ? -1 : xs === ys ? 0 : 1;
};
var mod = function(x, y) {
var r = x % y;
if (r / y < 0) {
r += y;
}
return r;
};
var random = function(x, y) {
x = Number(x) || 0;
y = Number(y) || 0;
if (x > y) {
var tmp = y;
y = x;
x = tmp;
}
if (x % 1 === 0 && y % 1 === 0) {
return Math.floor(Math.random() * (y - x + 1)) + x;
}
return Math.random() * (y - x) + x;
};
var rgb2hsl = function(rgb) {
var r = (rgb >> 16 & 0xff) / 0xff;
var g = (rgb >> 8 & 0xff) / 0xff;
var b = (rgb & 0xff) / 0xff;
var min = Math.min(r, g, b);
var max = Math.max(r, g, b);
if (min === max) {
return [0, 0, r * 100];
}
var c = max - min;
var l = (min + max) / 2;
var s = c / (1 - Math.abs(2 * l - 1));
var h;
switch (max) {
case r: h = ((g - b) / c + 6) % 6; break;
case g: h = (b - r) / c + 2; break;
case b: h = (r - g) / c + 4; break;
}
h *= 60;
return [h, s * 100, l * 100];
};
var clone = function(name) {
var parent = name === '_myself_' ? S : self.getObject(name);
var c = parent.clone();
self.children.splice(self.children.indexOf(parent), 0, c);
self.triggerFor(c, 'whenCloned');
};
var epoch = Date.UTC(2000, 0, 1);
var timeAndDate = P.Watcher.prototype.timeAndDate = function(format) {
switch (format) {
case 'year':
return new Date().getFullYear();
case 'month':
return new Date().getMonth() + 1;
case 'date':
return new Date().getDate();
case 'day of week':
return new Date().getDay() + 1;
case 'hour':
return new Date().getHours();
case 'minute':
return new Date().getMinutes();
case 'second':
return new Date().getSeconds();
}
return 0;
};
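  // Converts a Scratch list index ('random'/'any', 'last', or a 1-based number)
  // into a 0-based index, or -1 if it is out of range.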
var listIndex = function(list, index, length) {
if (index === 'random' || index === 'any') {
return Math.floor(Math.random() * length);
}
if (index === 'last') {
return length - 1;
}
var i = Math.floor(index) - 1;
return i === i && i >= 0 && i < length ? i : -1;
};
var contentsOfList = function(name) {
var list = S.listRefs[name];
if (!list) return '';
var isSingle = true;
for (var i = 0; i < list.contents.length; i++) {
if (list.contents[i].length !== 1) {
isSingle = false;
break;
}
}
return list.contents.join(isSingle ? '' : ' ');
};
var getLineOfList = function(name, index) {
var list = S.listRefs[name];
if (!list) return 0;
var i = listIndex(list, index, list.contents.length);
return list && i > -1 ? list.contents[i] : 0;
};
var lineCountOfList = function(name) {
var list = S.listRefs[name];
return list ? list.contents.length : 0;
};
var listContains = function(name, value) {
var list = S.listRefs[name];
return list ? list.contents.indexOf(value) > -1 : 0;
};
var appendToList = function(name, value) {
var list = S.listRefs[name];
if (list) {
list.contents.push(value);
}
};
var deleteLineOfList = function(name, index) {
var list = S.listRefs[name];
if (list) {
if (index === 'all') {
list.contents = [];
} else {
var i = listIndex(list, index, list.contents.length);
if (i > -1) {
list.contents.splice(i, 1);
}
}
}
};
var insertInList = function(name, index, value) {
var list = S.listRefs[name];
if (list) {
var i = listIndex(list, index, list.contents.length + 1);
if (i === list.contents.length) {
list.contents.push(value);
} else if (i > -1) {
list.contents.splice(i, 0, value);
}
}
};
var setLineOfList = function(name, index, value) {
var list = S.listRefs[name];
if (list) {
var i = listIndex(list, index, list.contents.length);
if (i > -1) {
list.contents[i] = value;
}
}
};
var mathFunc = function(f, x) {
switch (f) {
case 'abs':
case 'floor':
case 'sqrt':
return Math[f](x);
case 'ceiling':
return Math.ceil(x);
case 'cos':
x = 90 - x;
case 'sin':
// 0 <= x <= 45 for degrees->radians to work well
var neg = false;
x = x % 360;
if (x < 0) x += 360;
if (x > 180) {
neg = !neg;
x -= 180;
}
if (x > 90) {
x = 180 - x;
}
var z = x > 45 ?
Math.cos((90 - x) * Math.PI / 180) :
Math.sin(x * Math.PI / 180);
return neg ? -z : z;
case 'tan':
x = x % 180;
if (x < 0) x += 180;
return x > 90 ?
-Math.tan((90 - x) * Math.PI / 180) :
Math.tan(x * Math.PI / 180);
case 'asin':
case 'acos':
case 'atan':
return Math[f](x) * 180 / Math.PI;
case 'ln':
return Math.log(x);
case 'log':
return Math.log(x) / Math.LN10;
case 'e ^':
return Math.exp(x);
case '10 ^':
        return Math.exp(x * Math.LN10);
}
return 0;
};
var showVariable = function(name, visible) {
var ref = S.varRefs[name];
if (ref) {
if (!ref.watcher) {
ref.watcher = new P.Watcher(self);
ref.watcher.x = self.defaultWatcherX;
ref.watcher.y = self.defaultWatcherY;
self.defaultWatcherY += 26;
if (self.defaultWatcherY >= 450) {
self.defaultWatcherY = 10;
self.defaultWatcherX += 150;
}
ref.watcher.target = S.variables.indexOf(ref) !== -1 ? S : self;
ref.watcher.label = (ref.watcher.target === self ? '' : ref.watcher.target.objName + ': ') + name;
ref.watcher.param = name;
self.children.push(ref.watcher);
}
ref.watcher.visible = visible;
}
};
var attribute = function(attr, objName) {
var o = self.getObject(objName);
if (!o) return 0;
if (o.isSprite) {
switch (attr) {
case 'x position': return o.scratchX;
case 'y position': return o.scratchY;
case 'direction': return o.direction;
case 'costume #': return o.currentCostumeIndex + 1;
case 'costume name': return o.costumes[o.currentCostumeIndex].costumeName;
case 'size': return o.scale * 100;
case 'volume': return 0; // TODO
}
} else {
switch (attr) {
case 'background #':
case 'backdrop #': return o.currentCostumeIndex + 1;
case 'backdrop name': return o.costumes[o.currentCostumeIndex].costumeName;
case 'volume': return 0; // TODO
}
}
var ref = o.varRefs[attr];
if (ref) {
return ref.value;
}
return 0;
};
var save = function() {
STACK.push(R);
R = {};
};
var restore = function() {
R = STACK.pop();
};
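  // Call a custom block: bind the argument values to the procedure's input names and push a fresh call frame before running its body.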
var call = function(spec, id, values) {
var procedure = S.procedures[spec];
if (procedure) {
var args = {};
for (var i = 0; i < values.length; i++) {
args[procedure.inputs[i]] = values[i];
}
STACK.push(R);
CALLS.push(C);
C = {
fn: S.fns[id],
args: args,
stack: STACK = []
};
R = {};
procedure.fn();
} else {
S.fns[id]();
}
};
var endCall = function() {
if (CALLS.length) {
var fn = C.fn;
C = CALLS.pop();
STACK = C.stack;
R = STACK.pop();
if (fn != null) fn();
} else {
throw STOP_THREAD;
}
};
var sceneChange = function() {
return self.trigger('whenSceneStarts', self.costumes[self.currentCostumeIndex].costumeName);
};
var broadcast = function(name) {
return self.trigger('whenIReceive', name);
};
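  // True while any queued thread, on the stage or any sprite, was started from one of the given entry points.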
var running = function(bases) {
for (var j = 0; j < self.queue.length; j++) {
if (self.queue[j] && bases.indexOf(self.queue[j].base) !== -1) return true;
}
for (var i = 0; i < self.children.length; i++) {
var c = self.children[i];
if (c.isSprite) {
for (var j = 0; j < c.queue.length; j++) {
if (c.queue[j] && bases.indexOf(c.queue[j].base) !== -1) return true;
}
}
}
return false;
};
var queue = function(id) {
S.queue[THREAD] = {
base: BASE,
fn: S.fns[id],
calls: CALLS
};
};
// Internal definition
(function() {
'use strict';
P.Stage.prototype.framerate = 30;
P.Base.prototype.initRuntime = function() {
this.queue = [];
};
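    // Start a thread for the given entry point; a script that is already queued is restarted in place (aborting it if it is the thread currently executing).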
P.Base.prototype.startThread = function(base) {
var thread = {
base: base,
fn: base,
calls: [{ args:{}, stack: [{}] }]
};
for (var i = 0; i < this.queue.length; i++) {
if (this.queue[i] && this.queue[i].base === base) {
this.queue[i] = thread;
if (S === this && THREAD === i) {
throw STOP_THREAD;
}
return;
}
}
this.queue.push(thread);
};
P.Stage.prototype.triggerFor = function(sprite, event, arg) {
var threads;
if (event === 'whenClicked') {
threads = sprite.listeners.whenClicked;
} else if (event === 'whenCloned') {
threads = sprite.listeners.whenCloned;
} else if (event === 'whenGreenFlag') {
threads = sprite.listeners.whenGreenFlag;
} else if (event === 'whenIReceive') {
threads = sprite.listeners.whenIReceive[arg.toLowerCase()]
} else if (event === 'whenKeyPressed') {
threads = sprite.listeners.whenKeyPressed[arg];
} else if (event === 'whenSceneStarts') {
threads = sprite.listeners.whenSceneStarts[arg.toLowerCase()];
}
if (threads) {
for (var i = 0; i < threads.length; i++) {
sprite.startThread(threads[i]);
}
return threads;
}
return [];
};
P.Stage.prototype.trigger = function(event, arg) {
var result = this.triggerFor(this, event, arg);
for (var i = 0; i < this.children.length; i++) {
if (this.children[i].isSprite) {
result = result.concat(this.triggerFor(this.children[i], event, arg));
}
}
return result;
};
P.Stage.prototype.triggerGreenFlag = function() {
this.timerStart = this.now();
this.trigger('whenGreenFlag');
};
P.Stage.prototype.start = function() {
this.isRunning = true;
if (this.interval) return;
this.baseTime = Date.now();
this.interval = setInterval(this.step.bind(this), 1000 / this.framerate);
};
P.Stage.prototype.pause = function() {
if (this.interval) {
this.baseNow = this.now();
clearInterval(this.interval);
delete this.interval;
}
this.isRunning = false;
};
P.Stage.prototype.stopAll = function() {
this.hidePrompt = false;
this.prompter.style.display = 'none';
this.promptId = this.nextPromptId = 0;
this.queue = [];
this.resetFilters();
for (var i = 0; i < this.children.length; i++) {
var c = this.children[i];
if (c.isClone) {
this.children.splice(i, 1);
i -= 1;
} else if (c.isSprite) {
c.queue = [];
c.resetFilters();
}
}
};
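    // Run one scheduler pass over a sprite's thread queue, restoring each thread's saved scope and call stack before resuming it and saving them again afterwards.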
P.Stage.prototype.runFor = function(sprite) {
S = sprite;
var queue = sprite.queue;
TERMINATE = false;
for (THREAD = 0; THREAD < queue.length; THREAD++) {
if (queue[THREAD]) {
var fn = queue[THREAD].fn;
BASE = queue[THREAD].base;
CALLS = queue[THREAD].calls;
C = CALLS.pop();
STACK = C.stack;
R = STACK.pop();
queue[THREAD] = undefined;
try {
fn();
} catch (e) {
if (e !== STOP_THREAD) throw e;
queue[THREAD] = undefined;
continue;
}
STACK.push(R);
CALLS.push(C);
if (TERMINATE) return;
}
}
for (var i = queue.length; i--;) {
if (!queue[i]) queue.splice(i, 1);
}
};
P.Stage.prototype.now = function() {
return this.baseNow + Date.now() - this.baseTime;
};
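    // Per-frame tick: run every sprite and then the stage (repeatedly in turbo mode), redraw, and stop the interval if an error escapes.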
P.Stage.prototype.step = function() {
try {
self = this;
var start = Date.now();
do {
var children = this.children.slice(0);
for (var i = 0; i < children.length; i++) {
if (children[i].isSprite) {
this.runFor(children[i]);
}
}
this.runFor(this);
} while (self.isTurbo && Date.now() - start < 1000 / this.framerate);
this.draw();
S = null;
} catch (e) {
this.handleError(e);
clearInterval(this.interval);
}
};
    P.Stage.prototype.handleError = function(e) {
console.error(e.stack);
};
}());
return {
scopedEval: function(source) {
return eval(source);
}
};
}());
| Fixed warped doUntil
| phosphorus.js | Fixed warped doUntil | <ide><path>hosphorus.js
<ide>
<ide> if (warp) {
<ide>
<del> source += 'if (!' + bool(block[1]) + ') {\n';
<add> source += 'while (!' + bool(block[1]) + ') {\n';
<ide> seq(block[2]);
<ide> source += '}\n';
<ide> |
|
JavaScript | apache-2.0 | dac25bd24e51ac9478234ca03c312d3c9d4011d2 | 0 | justinlawrence/chordboard,justinlawrence/chordboard | import React, { Component } from 'react';
import { connect } from 'react-redux';
import { withRouter } from 'react-router-dom';
import FacebookLogin from 'react-facebook-login';
import { setCurrentUser } from 'actions';
import './login.scss';
class Login extends Component {
state = {
name: ''
};
handleInput = event => this.setState( { name: event.target.value } );
/*handleLogin = event => {
event.preventDefault();
localStorage.setItem( 'user', this.state.name );
this.props.setCurrentUser( {
name: this.state.name
} );
if ( this.props.history ) {
this.props.history.push( {
pathname: '/sets'
} );
}
};*/
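	// Facebook login callback: store the logged-in user in redux and localStorage, then redirect to the set list.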
responseFacebook = response => {
if ( response && response.userID ) {
const user = {
id: response.userID,
name: response.name
};
this.props.setCurrentUser( user );
try {
localStorage.setItem( 'user', JSON.stringify( user ) );
} catch ( err ) {
console.error( 'Could not set `user` in localStorage' );
}
if ( this.props.history ) {
this.props.history.push( {
pathname: '/sets'
} );
}
}
};
render() {
const props = this.props;
const { name } = this.state;
return (
<div>
<section className="hero is-dark is-fullheight">
<div className="hero-body">
<div className="container has-text-centered">
<h1 className="title is-1">Chordboard</h1>
<h2 className="subtitle">On the same page</h2>
<form onSubmit={this.handleLogin}>
<div className="field">
<FacebookLogin
appId="2075514469393369"
autoLoad={true}
callback={this.responseFacebook}
/>
</div>
</form>
</div>
</div>
</section>
</div>
);
}
}
const mapDispatchToProps = {
setCurrentUser
};
export default withRouter( connect( null, mapDispatchToProps )( Login ) );
| src/app/login/Login.js | import React, { Component } from 'react';
import { connect } from 'react-redux';
import { withRouter } from 'react-router-dom';
import FacebookLogin from 'react-facebook-login';
import { setCurrentUser } from 'actions';
import './login.scss';
class Login extends Component {
state = {
name: ''
};
handleInput = event => this.setState( { name: event.target.value } );
/*handleLogin = event => {
event.preventDefault();
localStorage.setItem( 'user', this.state.name );
this.props.setCurrentUser( {
name: this.state.name
} );
if ( this.props.history ) {
this.props.history.push( {
pathname: '/sets'
} );
}
};*/
responseFacebook = response => {
if ( response && response.userID ) {
const user = {
id: response.userID,
name: response.name
};
this.props.setCurrentUser( user );
try {
localStorage.setItem( 'user', JSON.stringify( user ) );
} catch ( err ) {
console.error( 'Could not set `user` in localStorage' );
}
if ( this.props.history ) {
this.props.history.push( {
pathname: '/sets'
} );
}
}
};
render() {
const props = this.props;
const { name } = this.state;
return (
<div>
<section className="hero is-dark">
<div className="hero-body">
<div className="container">
<div className="columns is-vcentered">
<div className="column">
<h1 className="title is-1">Chordboard</h1>
<h2 className="subtitle">The setlist for live musicians.</h2>
</div>
</div>
</div>
</div>
</section>
<section className="section columns">
<div className="column is-one-third is-offset-one-third">
<article className="card is-rounded">
<div className="card-content">
<form onSubmit={this.handleLogin}>
<h1 className="title">
Login
</h1>
<div className="field">
<FacebookLogin
appId="2075514469393369"
autoLoad={true}
callback={this.responseFacebook}
/>
</div>
{/*<div className="field">
<p className="control has-icons-left">
<input className="input is-medium" type="text"
placeholder="Your Name"
onInput={this.handleInput} value={name}/>
<span className="icon is-small is-left"><i
className="fa fa-envelope"/></span>
</p>
</div>
<div className="field">
<p className="control has-icons-left">
<input className="input is-medium" type="password"
placeholder="Password"/>
<span className="icon is-small is-left"><i
className="fa fa-lock"/></span>
</p>
</div>
<div className="field">
<p className="control">
<button
className="button is-primary is-medium is-fullwidth">
Login
</button>
</p>
</div>*/}
</form>
</div>
</article>
</div>
</section>
</div>
);
}
}
const mapDispatchToProps = {
setCurrentUser
};
export default withRouter( connect( null, mapDispatchToProps )( Login ) );
| login page styling
| src/app/login/Login.js | login page styling | <ide><path>rc/app/login/Login.js
<ide>
<ide> return (
<ide> <div>
<del> <section className="hero is-dark">
<add> <section className="hero is-dark is-fullheight">
<ide> <div className="hero-body">
<del> <div className="container">
<del> <div className="columns is-vcentered">
<del> <div className="column">
<add> <div className="container has-text-centered">
<ide> <h1 className="title is-1">Chordboard</h1>
<del> <h2 className="subtitle">The setlist for live musicians.</h2>
<del> </div>
<del> </div>
<add> <h2 className="subtitle">On the same page</h2>
<add> <form onSubmit={this.handleLogin}>
<add>
<add> <div className="field">
<add> <FacebookLogin
<add> appId="2075514469393369"
<add> autoLoad={true}
<add> callback={this.responseFacebook}
<add> />
<add> </div>
<add> </form>
<ide> </div>
<ide> </div>
<del> </section>
<del>
<del> <section className="section columns">
<del> <div className="column is-one-third is-offset-one-third">
<del> <article className="card is-rounded">
<del> <div className="card-content">
<del> <form onSubmit={this.handleLogin}>
<del> <h1 className="title">
<del> Login
<del> </h1>
<del>
<del> <div className="field">
<del> <FacebookLogin
<del> appId="2075514469393369"
<del> autoLoad={true}
<del> callback={this.responseFacebook}
<del> />
<del> </div>
<del>
<del> {/*<div className="field">
<del> <p className="control has-icons-left">
<del> <input className="input is-medium" type="text"
<del> placeholder="Your Name"
<del> onInput={this.handleInput} value={name}/>
<del> <span className="icon is-small is-left"><i
<del> className="fa fa-envelope"/></span>
<del> </p>
<del> </div>
<del>
<del> <div className="field">
<del> <p className="control has-icons-left">
<del> <input className="input is-medium" type="password"
<del> placeholder="Password"/>
<del> <span className="icon is-small is-left"><i
<del> className="fa fa-lock"/></span>
<del> </p>
<del> </div>
<del>
<del> <div className="field">
<del> <p className="control">
<del> <button
<del> className="button is-primary is-medium is-fullwidth">
<del> Login
<del> </button>
<del> </p>
<del> </div>*/}
<del>
<del> </form>
<del>
<del> </div>
<del> </article>
<del>
<del> </div>
<del>
<ide> </section>
<ide>
<ide> </div> |
|
Java | mit | 0e9f4a0154aca11f6bb934f037bac14371374599 | 0 | freefair/android-injection | package io.freefair.injection.injector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.annotation.Annotation;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.WeakHashMap;
import io.freefair.injection.annotation.Inject;
import io.freefair.injection.annotation.Value;
import io.freefair.injection.exceptions.InjectionException;
import io.freefair.injection.reflection.Reflection;
import io.freefair.util.function.Optional;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import static lombok.AccessLevel.PROTECTED;
/**
 * Abstract implementation of a dependency injector
*/
@Slf4j
public abstract class Injector {
private final Optional<Injector> parentInjector;
public Injector(Object... parentInjectors) {
if (parentInjectors == null) {
this.parentInjector = Optional.empty();
} else {
this.parentInjector = Optional.of(InjectorUtils.getParentInjector(parentInjectors));
}
topClasses = new HashSet<>();
}
private WeakHashMap<Object, Class<?>> alreadyInjectedInstances = new WeakHashMap<>();
private static WeakHashMap<Object, Injector> responsibleInjectors = new WeakHashMap<>();
protected Injector getInjector(Object instance) {
Injector injector = responsibleInjectors.get(instance);
if (injector != null)
return injector;
return this;
}
/**
* Injects as much as possible into the given object
*
* @param instance The object to inject into
*/
public final void inject(@NotNull Object instance) {
inject(instance, instance.getClass());
}
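    // Objects that are currently being injected; resolveBean() consults this stack first so an
    // instance that is already in progress (e.g. a circular reference) is reused rather than re-resolved.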
Deque<Object> instancesStack = new LinkedList<>();
public final void inject(@NotNull Object instance, @NotNull Class<?> clazz) {
responsibleInjectors.put(instance, this);
if (!alreadyInjectedInstances.containsKey(instance)) {
long start = System.currentTimeMillis();
instancesStack.addLast(instance);
alreadyInjectedInstances.put(instance, clazz);
for (Field field : getFields(clazz)) {
log.trace("Visit field {}", field);
visitField(instance, FieldWrapper.of(field));
}
instancesStack.removeLast();
long end = System.currentTimeMillis();
log.debug("Injection of " + instance + " took " + (end - start) + "ms");
}
}
private static WeakHashMap<Class<?>,List<Field>> fieldCache = new WeakHashMap<>();
private List<Field> getFields(@NotNull Class<?> clazz) {
if(!fieldCache.containsKey(clazz))
fieldCache.put(clazz, Reflection.getAllFields(clazz, getUpToExcluding(clazz)));
return fieldCache.get(clazz);
}
@Getter(PROTECTED)
private Set<Class<?>> topClasses;
@NotNull
@SuppressWarnings("unchecked")
private <X> Class<X> getUpToExcluding(Class<? extends X> clazz) {
for (Class<?> topClazz : topClasses) {
if (topClazz.isAssignableFrom(clazz))
return (Class<X>) topClazz;
}
return (Class<X>) Object.class;
}
/**
* Inject the field, or call super
*
* @param instance the instance to inject into
* @param field the field to inject
*/
protected void visitField(@NotNull Object instance, @NotNull FieldWrapper field) {
if (field.isAnnotationPresent(Inject.class)) {
Inject injectAnnotation = field.getAnnotation(Inject.class);
Class<?> targetType = injectAnnotation.value().equals(Object.class)
? field.getType()
: injectAnnotation.value();
Optional<?> bean = getInjector(instance).resolveBean(targetType, instance);
field.set(instance, bean.orNull());
}
if (field.isAnnotationPresent(Value.class)) {
Value valueAnnotation = field.getAnnotation(Value.class);
Class<?> targetType = field.getType();
Optional<?> value = getInjector(instance).resolveValue(valueAnnotation.value(), targetType);
field.set(instance, value.orNull());
}
}
/**
* Resolve the given type to an object, or call super
* <p/>
* The base implementation asks the parent if possible or tries to provide a new instance
*
* @param <T> the type of the object to return
* @param type the type of the object to return
* @param instance the instance the returned object will be injected into
* @return The object to use for the given type
*/
@SuppressWarnings("unchecked")
@NotNull
public <T> Optional<? extends T> resolveBean(@NotNull Class<T> type, @Nullable Object instance) {
for (Object inst : instancesStack) {
if (type.isInstance(inst))
return Optional.of((T) inst);
}
if (parentInjector.isPresent()) {
return parentInjector.get().resolveBean(type, instance);
} else {
return Optional.empty();
}
}
@NotNull
public <V> Optional<V> resolveValue(String key, Class<V> type) {
if (parentInjector.isPresent())
return parentInjector.get().resolveValue(key, type);
else
return Optional.empty();
}
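    /**
     * Wraps a {@link Field} and hides the {@link Optional} and {@link WeakReference} wrapper types:
     * {@link #getType()} reports the unwrapped target type and {@link #set(Object, Object)} re-wraps the value.
     */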
protected static class FieldWrapper {
private static WeakHashMap<Field, FieldWrapper> cache = new WeakHashMap<>();
@Getter
private final Field field;
@Getter
private final Class<?> type;
@Getter
@Setter
private boolean optional;
private FieldWrapper(Field field) {
this.field = field;
this.type = resolveType();
if (field.isAnnotationPresent(io.freefair.injection.annotation.Optional.class))
optional = true;
}
private Class<?> resolveType() {
if (field.getType().equals(Optional.class)) {
setOptional(true);
return (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
}
if (field.getType().equals(WeakReference.class))
return (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
return field.getType();
}
public static FieldWrapper of(Field field) {
if (cache.containsKey(field)) {
return cache.get(field);
}
FieldWrapper fieldWrapper = new FieldWrapper(field);
cache.put(field, fieldWrapper);
return fieldWrapper;
}
public void set(Object instance, Object value) {
if (value == null && !isOptional()) {
throw new InjectionException("No value for required field " + field.toString());
}
try {
field.setAccessible(true);
} catch (SecurityException ignored) {
}
if (field.getType().equals(Optional.class))
value = Optional.ofNullable(value);
if (field.getType().equals(WeakReference.class))
value = new WeakReference<>(value);
try {
field.set(instance, value);
} catch (IllegalAccessException e) {
log.error("Cannot inject value", e);
throw new InjectionException(e);
}
}
public boolean isAnnotationPresent(Class<? extends Annotation> annotationClass) {
return getField().isAnnotationPresent(annotationClass);
}
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return getField().getAnnotation(annotationClass);
}
}
}
| java-injection/src/main/java/io/freefair/injection/injector/Injector.java | package io.freefair.injection.injector;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.annotation.Annotation;
import java.lang.ref.WeakReference;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.WeakHashMap;
import io.freefair.injection.annotation.Inject;
import io.freefair.injection.annotation.Value;
import io.freefair.injection.exceptions.InjectionException;
import io.freefair.injection.reflection.Reflection;
import io.freefair.util.function.Optional;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import static lombok.AccessLevel.PROTECTED;
/**
 * Abstract implementation of a dependency injector
*/
@Slf4j
public abstract class Injector {
private final Optional<Injector> parentInjector;
public Injector(Object... parentInjectors) {
if (parentInjectors == null) {
this.parentInjector = Optional.empty();
} else {
this.parentInjector = Optional.of(InjectorUtils.getParentInjector(parentInjectors));
}
topClasses = new HashSet<>();
}
private WeakHashMap<Object, Class<?>> alreadyInjectedInstances = new WeakHashMap<>();
private static WeakHashMap<Object, Injector> responsibleInjectors = new WeakHashMap<>();
protected Injector getInjector(Object instance) {
Injector injector = responsibleInjectors.get(instance);
if (injector != null)
return injector;
return this;
}
/**
* Injects as much as possible into the given object
*
* @param instance The object to inject into
*/
public final void inject(@NotNull Object instance) {
inject(instance, instance.getClass());
}
Deque<Object> instancesStack = new LinkedList<>();
public final void inject(@NotNull Object instance, @NotNull Class<?> clazz) {
responsibleInjectors.put(instance, this);
if (!alreadyInjectedInstances.containsKey(instance)) {
long start = System.currentTimeMillis();
instancesStack.addLast(instance);
alreadyInjectedInstances.put(instance, clazz);
for (Field field : getFields(clazz)) {
log.trace("Visit field {}", field);
visitField(instance, FieldWrapper.of(field));
}
instancesStack.removeLast();
long end = System.currentTimeMillis();
log.debug("Injection of " + instance + " took " + (end - start) + "ms");
}
}
private static WeakHashMap<Class<?>,List<Field>> fieldCache = new WeakHashMap<>();
private List<Field> getFields(@NotNull Class<?> clazz) {
if(!fieldCache.containsKey(clazz))
fieldCache.put(clazz, Reflection.getAllFields(clazz, getUpToExcluding(clazz)));
return fieldCache.get(clazz);
}
@Getter(PROTECTED)
private Set<Class<?>> topClasses;
@NotNull
@SuppressWarnings("unchecked")
private <X> Class<X> getUpToExcluding(Class<? extends X> clazz) {
for (Class<?> topClazz : topClasses) {
if (topClazz.isAssignableFrom(clazz))
return (Class<X>) topClazz;
}
return (Class<X>) Object.class;
}
/**
* Inject the field, or call super
*
* @param instance the instance to inject into
* @param field the field to inject
*/
protected void visitField(@NotNull Object instance, @NotNull FieldWrapper field) {
if (field.isAnnotationPresent(Inject.class)) {
Inject injectAnnotation = field.getAnnotation(Inject.class);
Class<?> targetType = injectAnnotation.value().equals(Object.class)
? field.getType()
: injectAnnotation.value();
Optional<?> bean = getInjector(instance).resolveBean(targetType, instance);
field.set(instance, bean.orNull());
}
if (field.isAnnotationPresent(Value.class)) {
Value valueAnnotation = field.getAnnotation(Value.class);
Class<?> targetType = field.getType();
Optional<?> value = getInjector(instance).resolveValue(valueAnnotation.value(), targetType);
field.set(instance, value.orNull());
}
if (parentInjector.isPresent()) {
parentInjector.get().visitField(instance, field);
}
}
/**
* Resolve the given type to an object, or call super
* <p/>
* The base implementation asks the parent if possible or tries to provide a new instance
*
* @param <T> the type of the object to return
* @param type the type of the object to return
* @param instance the instance the returned object will be injected into
* @return The object to use for the given type
*/
@SuppressWarnings("unchecked")
@NotNull
public <T> Optional<? extends T> resolveBean(@NotNull Class<T> type, @Nullable Object instance) {
for (Object inst : instancesStack) {
if (type.isInstance(inst))
return Optional.of((T) inst);
}
if (parentInjector.isPresent()) {
return parentInjector.get().resolveBean(type, instance);
} else {
return Optional.empty();
}
}
@NotNull
public <V> Optional<V> resolveValue(String key, Class<V> type) {
if (parentInjector.isPresent())
return parentInjector.get().resolveValue(key, type);
else
return Optional.empty();
}
protected static class FieldWrapper {
private static WeakHashMap<Field, FieldWrapper> cache = new WeakHashMap<>();
@Getter
private final Field field;
@Getter
private final Class<?> type;
@Getter
@Setter
private boolean optional;
private FieldWrapper(Field field) {
this.field = field;
this.type = resolveType();
if (field.isAnnotationPresent(io.freefair.injection.annotation.Optional.class))
optional = true;
}
private Class<?> resolveType() {
if (field.getType().equals(Optional.class)) {
setOptional(true);
return (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
}
if (field.getType().equals(WeakReference.class))
return (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
return field.getType();
}
public static FieldWrapper of(Field field) {
if (cache.containsKey(field)) {
return cache.get(field);
}
FieldWrapper fieldWrapper = new FieldWrapper(field);
cache.put(field, fieldWrapper);
return fieldWrapper;
}
public void set(Object instance, Object value) {
if (value == null && !isOptional()) {
throw new InjectionException("No value for required field " + field.toString());
}
try {
field.setAccessible(true);
} catch (SecurityException ignored) {
}
if (field.getType().equals(Optional.class))
value = Optional.ofNullable(value);
if (field.getType().equals(WeakReference.class))
value = new WeakReference<>(value);
try {
field.set(instance, value);
} catch (IllegalAccessException e) {
log.error("Cannot inject value", e);
throw new InjectionException(e);
}
}
public boolean isAnnotationPresent(Class<? extends Annotation> annotationClass) {
return getField().isAnnotationPresent(annotationClass);
}
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return getField().getAnnotation(annotationClass);
}
}
}
| fix in injector
| java-injection/src/main/java/io/freefair/injection/injector/Injector.java | fix in injector | <ide><path>ava-injection/src/main/java/io/freefair/injection/injector/Injector.java
<ide> Optional<?> value = getInjector(instance).resolveValue(valueAnnotation.value(), targetType);
<ide>
<ide> field.set(instance, value.orNull());
<del> }
<del>
<del> if (parentInjector.isPresent()) {
<del> parentInjector.get().visitField(instance, field);
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 5b91a19d98b93b407657e1da0994e14293e91889 | 0 | xburgos/Book | package org.digitalcraftsman.book;
import java.util.concurrent.*;
import java.util.Iterator;
import java.util.function.Function;
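/**
 * Lazily iterates over paged data. Each page is fetched through the supplied {@code turnPage}
 * function, and once roughly half of the current page has been consumed the next page is
 * prefetched on a background thread.
 *
 * Illustrative use (the repository lookup here is hypothetical):
 * {@code Book<Order> orders = new Book<>(page -> repository.findOrders(page));}
 * then iterate it with a plain for-each loop.
 */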
public class Book<T> implements Iterable<T> {
private static final double PRELOAD_THRESSHOLD = 0.5;
private static final long DEFAULT_START_PAGE = 1;
private static final long DEFAULT_PAGE_SIZE = 10;
private final Function<Page, Pageable<T>> turnPage;
private final ExecutorService executorService;
private final long startPage;
private final long pageSize;
public Book(Function<Page, Pageable<T>> turnPage) {
if(turnPage == null) throw new IllegalArgumentException("turnPage must not be null");
this.executorService = Executors.newFixedThreadPool(4);
this.turnPage = turnPage;
this.startPage = DEFAULT_START_PAGE;
this.pageSize = DEFAULT_PAGE_SIZE;
}
public Book(Function<Page, Pageable<T>> turnPage, long startPage, long pageSize) {
if(turnPage == null) throw new IllegalArgumentException("turnPage must not be null");
this.executorService = Executors.newFixedThreadPool(4);
this.turnPage = turnPage;
this.startPage = startPage;
this.pageSize = pageSize;
}
@Override
public Iterator<T> iterator() {
Pageable<T> firstPageContents = turnPage.apply(new Page(startPage, pageSize));
return new BookIterator(firstPageContents);
}
private class BookIterator implements Iterator<T> {
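        // currentLine counts items consumed from the current page; nextPage holds the prefetch future once it has been requested.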
private Iterator<T> currentPageContents;
private long currentLine;
private Pageable currentPage;
private Future<Pageable<T>> nextPage;
public BookIterator(Pageable<T> page) {
this.currentPageContents = page.getPageContents().iterator();
this.currentPage = page;
this.currentLine = 1;
}
@Override
public boolean hasNext() {
return currentPageContents.hasNext();
}
@Override
public T next() {
T line = currentPageContents.next();
if(moreThanHalfPageHasBeenRead() && nextPageHasNotBeenRequested()) {
requestNextPage();
}
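            // The current page is exhausted: wait for the prefetched page and switch to it.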
if(!currentPageContents.hasNext()) {
try {
this.currentPage = nextPage.get();
this.currentPageContents = currentPage.getPageContents().iterator();
this.nextPage = null;
this.currentLine = 1;
return line;
} catch (InterruptedException e) {
e.printStackTrace();
                    Thread.currentThread().interrupt(); // restore the interrupt flag rather than clearing it
} catch (ExecutionException e) {
e.printStackTrace();
}
}
this.currentLine += 1;
return line;
}
private void requestNextPage() {
this.nextPage = executorService.submit(this::getNextPage);
}
private boolean nextPageHasNotBeenRequested() {
return nextPage == null;
}
private boolean moreThanHalfPageHasBeenRead() {
double percentage = (double)currentLine / (double)currentPage.getPageContents().size();
return percentage >= PRELOAD_THRESSHOLD;
}
private Pageable<T> getNextPage() {
Pageable<T> contents = turnPage.apply(new Page(currentPage.getPageNumber() + 1, currentPage.getPageSize()));
return contents != null ? contents: null;
}
}
}
| src/main/java/org/digitalcraftsman/book/Book.java | package org.digitalcraftsman.book;
import java.util.concurrent.*;
import java.util.Iterator;
import java.util.function.Function;
public class Book<T> implements Iterable<T> {
private static final double PRELOAD_THRESSHOLD = 0.5;
private static final long DEFAULT_START_PAGE = 1;
private static final long DEFAULT_PAGE_SIZE = 10;
private final Function<Page, Pageable<T>> turnPage;
private final ExecutorService executorService;
private final long startPage;
private final long pageSize;
public Book(Function<Page, Pageable<T>> turnPage) {
if(turnPage == null) throw new IllegalArgumentException("turnPage must not be null");
this.executorService = Executors.newFixedThreadPool(4);
this.turnPage = turnPage;
this.startPage = DEFAULT_START_PAGE;
this.pageSize = DEFAULT_PAGE_SIZE;
}
public Book(Function<Page, Pageable<T>> turnPage, long startPage, long pageSize) {
if(turnPage == null) throw new IllegalArgumentException("turnPage must not be null");
this.executorService = Executors.newFixedThreadPool(4);
this.turnPage = turnPage;
this.startPage = startPage;
this.pageSize = pageSize;
}
@Override
public Iterator<T> iterator() {
Pageable<T> firstPageContents = turnPage.apply(new Page(startPage, pageSize));
return new BookIterator(firstPageContents);
}
private class BookIterator implements Iterator<T> {
private Iterator<T> currentPageContents;
private long currentLine;
private Pageable currentPage;
private Future<Pageable<T>> nextPage;
public BookIterator(Pageable<T> page) {
this.currentPageContents = page.getPageContents().iterator();
this.currentPage = page;
this.currentLine = 1;
}
@Override
public boolean hasNext() {
return currentPageContents.hasNext();
}
@Override
public T next() {
T line = currentPageContents.next();
if(moreThanHalfPageHasBeenRead() && nextPage == null) {
this.nextPage = executorService.submit(this::getNextPage);
}
if(!currentPageContents.hasNext()) {
try {
this.currentPage = nextPage.get();
this.currentPageContents = currentPage.getPageContents().iterator();
this.nextPage = null;
this.currentLine = 1;
return line;
} catch (InterruptedException e) {
e.printStackTrace();
Thread.interrupted();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
this.currentLine += 1;
return line;
}
private boolean moreThanHalfPageHasBeenRead() {
double percentage = (double)currentLine / (double)currentPage.getPageContents().size();
return percentage >= PRELOAD_THRESSHOLD;
}
private Pageable<T> getNextPage() {
Pageable<T> contents = turnPage.apply(new Page(currentPage.getPageNumber() + 1, currentPage.getPageSize()));
return contents != null ? contents: null;
}
}
}
| Renamed some methods for improved readability
| src/main/java/org/digitalcraftsman/book/Book.java | Renamed some methods for improved readability | <ide><path>rc/main/java/org/digitalcraftsman/book/Book.java
<ide> @Override
<ide> public T next() {
<ide> T line = currentPageContents.next();
<del> if(moreThanHalfPageHasBeenRead() && nextPage == null) {
<del> this.nextPage = executorService.submit(this::getNextPage);
<add> if(moreThanHalfPageHasBeenRead() && nextPageHasNotBeenRequested()) {
<add> requestNextPage();
<ide> }
<ide> if(!currentPageContents.hasNext()) {
<ide> try {
<ide> return line;
<ide> }
<ide>
<add> private void requestNextPage() {
<add> this.nextPage = executorService.submit(this::getNextPage);
<add> }
<add>
<add> private boolean nextPageHasNotBeenRequested() {
<add> return nextPage == null;
<add> }
<add>
<ide> private boolean moreThanHalfPageHasBeenRead() {
<ide> double percentage = (double)currentLine / (double)currentPage.getPageContents().size();
<ide> return percentage >= PRELOAD_THRESSHOLD; |
|
Java | apache-2.0 | 0fa1c1ade1281893428e451a9e0a05a5b64b9611 | 0 | kuujo/onos,oplinkoms/onos,oplinkoms/onos,oplinkoms/onos,kuujo/onos,gkatsikas/onos,opennetworkinglab/onos,gkatsikas/onos,kuujo/onos,oplinkoms/onos,oplinkoms/onos,gkatsikas/onos,opennetworkinglab/onos,oplinkoms/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,opennetworkinglab/onos,kuujo/onos,kuujo/onos,kuujo/onos,kuujo/onos,gkatsikas/onos,gkatsikas/onos,opennetworkinglab/onos,gkatsikas/onos | /*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This work was partially supported by EC H2020 project METRO-HAUL (761727).
*/
package org.onosproject.drivers.odtn.openconfig;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.concurrent.CompletableFuture;
import org.onlab.packet.ChassisId;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.commons.configuration.tree.xpath.XPathExpressionEngine;
import org.onosproject.drivers.utilities.XmlConfigParser;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceDescriptionDiscovery;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DefaultPortDescription.Builder;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.SparseAnnotations;
import org.onosproject.net.Port.Type;
import org.onosproject.net.PortNumber;
import org.onosproject.netconf.NetconfController;
import org.onosproject.netconf.NetconfDevice;
import org.onosproject.netconf.NetconfException;
import org.onosproject.netconf.NetconfSession;
import com.google.common.collect.ImmutableList;
import org.onosproject.odtn.behaviour.OdtnDeviceDescriptionDiscovery;
/**
 * Driver Implementation of the DeviceDescription discovery for OpenConfig
* terminal devices.
*
*/
public class TerminalDeviceDiscovery
extends AbstractHandlerBehaviour
implements OdtnDeviceDescriptionDiscovery, DeviceDescriptionDiscovery {
private static final String RPC_TAG_NETCONF_BASE =
"<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">";
private static final String RPC_CLOSE_TAG = "</rpc>";
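    // OpenConfig component/transport types used to classify components when building port descriptions.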
private static final String OC_PLATFORM_TYPES_TRANSCEIVER =
"oc-platform-types:TRANSCEIVER";
private static final String OC_PLATFORM_TYPES_PORT =
"oc-platform-types:PORT";
private static final String OC_TRANSPORT_TYPES_OPTICAL_CHANNEL =
"oc-opt-types:OPTICAL_CHANNEL";
private static final Logger log = getLogger(TerminalDeviceDiscovery.class);
/**
* Returns the NetconfSession with the device for which the method was called.
*
     * @param deviceId device identifier
*
* @return The netconf session or null
*/
private NetconfSession getNetconfSession(DeviceId deviceId) {
NetconfController controller = handler().get(NetconfController.class);
NetconfDevice ncdev = controller.getDevicesMap().get(deviceId);
if (ncdev == null) {
log.trace("No netconf device, returning null session");
return null;
}
return ncdev.getSession();
}
/**
* Get the deviceId for which the methods apply.
*
* @return The deviceId as contained in the handler data
*/
private DeviceId did() {
return handler().data().deviceId();
}
/**
* Get the device instance for which the methods apply.
*
* @return The device instance
*/
private Device getDevice() {
DeviceService deviceService = checkNotNull(handler().get(DeviceService.class));
Device device = deviceService.getDevice(did());
return device;
}
/**
* Construct a String with a Netconf filtered get RPC Message.
*
* @param filter A valid XML tree with the filter to apply in the get
* @return a String containing the RPC XML Document
*/
private String filteredGetBuilder(String filter) {
StringBuilder rpc = new StringBuilder(RPC_TAG_NETCONF_BASE);
rpc.append("<get>");
rpc.append("<filter type='subtree'>");
rpc.append(filter);
rpc.append("</filter>");
rpc.append("</get>");
rpc.append(RPC_CLOSE_TAG);
return rpc.toString();
}
/**
* Construct a String with a Netconf filtered get RPC Message.
*
* @param filter A valid XPath Expression with the filter to apply in the get
* @return a String containing the RPC XML Document
*
* Note: server must support xpath capability.
* <select=" /components/component[name='PORT-A-In-1']/properties/...
* ...property[name='onos-index']/config/value" type="xpath"/>
*/
private String xpathFilteredGetBuilder(String filter) {
StringBuilder rpc = new StringBuilder(RPC_TAG_NETCONF_BASE);
rpc.append("<get>");
rpc.append("<filter type='xpath' select=\"");
rpc.append(filter);
rpc.append("\"/>");
rpc.append("</get>");
rpc.append(RPC_CLOSE_TAG);
return rpc.toString();
}
/**
* Builds a request to get Device details, operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/component/state/type being oc-platform-types:OPERATING_SYSTEM
*/
private String getDeviceDetailsBuilder() {
StringBuilder filter = new StringBuilder();
filter.append("<components xmlns='http://openconfig.net/yang/platform'>");
filter.append(" <component>");
filter.append(" <state>");
filter.append(" <type xmlns:oc-platform-types='http://openconfig.net/");
filter.append("yang/platform-types'>oc-platform-types:OPERATING_SYSTEM</type>");
filter.append(" </state>");
filter.append(" </component>");
filter.append("</components>");
return filteredGetBuilder(filter.toString());
/* I am not sure the alternative method is more efficient
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
Document doc = db.newDocument();
Element rpc = doc.createElementNS("urn:ietf:params:xml:ns:netconf:base:1.0", "rpc");
Element get = doc.createElement("get");
Element rpc = doc.createElement("rpc");
Element components = doc.createElementNS("http://openconfig.net/yang/platform", "components");
Element component = doc.createElement("component");
Element state = doc.createElement("state");
Element type = doc.createElement("type");
type.setAttributeNS("http://www.w3.org/2000/xmlns/",
"xmlns:oc-platform-types", "http://openconfig.net/yang/platform-types");
type.appendChild(doc.createTextNode("oc-platform-types:OPERATING_SYSTEM"));
state.appendChild(type);
component.appendChild(state);
components.appendChild(component);
rpc.appendChild(components);
get.appendChild(rpc);
rpc.appendChild(get);
doc.appendChild(rpc);
return NetconfRpcParserUtil.toString(doc);
} catch (Exception e) {
throw new RuntimeException(new NetconfException("Exception in getDeviceDetailsBuilder", e));
}
*/
}
/**
* Builds a request to get Device Components, config and operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/
*/
private String getDeviceComponentsBuilder() {
return filteredGetBuilder("<components xmlns='http://openconfig.net/yang/platform'/>");
}
/**
* Builds a request to get Device Ports, config and operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/component/state/type being oc-platform-types:PORT
*/
private String getDevicePortsBuilder() {
StringBuilder rpc = new StringBuilder();
rpc.append("<components xmlns='http://openconfig.net/yang/platform'>");
rpc.append(" <component><state>");
rpc.append(" <type xmlns:oc-platform-types='http://openconfig.net/");
rpc.append("yang/platform-types'>oc-platform-types:PORT</type>");
rpc.append(" </state></component>");
rpc.append("</components>");
return filteredGetBuilder(rpc.toString());
}
/**
* Returns a DeviceDescription with Device info.
*
* @return DeviceDescription or null
*
* //CHECKSTYLE:OFF
* <pre>{@code
* <data>
* <components xmlns="http://openconfig.net/yang/platform">
* <component>
* <state>
* <name>FIRMWARE</name>
* <type>oc-platform-types:OPERATING_SYSTEM</type>
* <description>CTTC METRO-HAUL Emulated OpenConfig TerminalDevice</description>
* <version>0.0.1</version>
* </state>
* </component>
* </components>
* </data>
*}</pre>
* //CHECKSTYLE:ON
*/
@Override
public DeviceDescription discoverDeviceDetails() {
log.info("TerminalDeviceDiscovery::discoverDeviceDetails device {}", did());
boolean defaultAvailable = true;
SparseAnnotations annotations = DefaultAnnotations.builder().build();
// Other option "OTHER", we use ROADM for now
org.onosproject.net.Device.Type type =
org.onosproject.net.Device.Type.ROADM;
// Some defaults
String vendor = "NOVENDOR";
String hwVersion = "0.1.1";
String swVersion = "0.1.1";
String serialNumber = "0xCAFEBEEF";
String chassisId = "128";
// Get the session,
NetconfSession session = getNetconfSession(did());
if (session != null) {
try {
String reply = session.get(getDeviceDetailsBuilder());
// <rpc-reply> as root node
XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(reply);
vendor = xconf.getString("data/components/component/state/mfg-name", vendor);
serialNumber = xconf.getString("data/components/component/state/serial-no", serialNumber);
// Requires OpenConfig >= 2018
swVersion = xconf.getString("data/components/component/state/software-version", swVersion);
hwVersion = xconf.getString("data/components/component/state/hardware-version", hwVersion);
} catch (Exception e) {
throw new IllegalStateException(new NetconfException("Failed to retrieve version info.", e));
}
} else {
log.info("TerminalDeviceDiscovery::discoverDeviceDetails - No netconf session for {}", did());
}
log.info("VENDOR {}", vendor);
log.info("HWVERSION {}", hwVersion);
log.info("SWVERSION {}", swVersion);
log.info("SERIAL {}", serialNumber);
log.info("CHASSISID {}", chassisId);
ChassisId cid = new ChassisId(Long.valueOf(chassisId, 10));
return new DefaultDeviceDescription(did().uri(),
type, vendor, hwVersion, swVersion, serialNumber,
cid, defaultAvailable, annotations);
}
/**
* Returns a list of PortDescriptions for the device.
*
* @return a list of descriptions.
*
* The RPC reply follows the following pattern:
* //CHECKSTYLE:OFF
* <pre>{@code
* <?xml version="1.0" encoding="UTF-8"?>
* <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="7">
* <data>
* <components xmlns="http://openconfig.net/yang/platform">
* <component>....
* </component>
* <component>....
* </component>
* </components>
* </data>
* </rpc-reply>
* }</pre>
* //CHECKSTYLE:ON
*/
@Override
public List<PortDescription> discoverPortDetails() {
try {
NetconfSession session = getNetconfSession(did());
/*
Note: the method may get called before the netconf session is established
2018-05-24 14:01:43,607 | INFO
event NetworkConfigEvent{time=2018-05-24T14:01:43.602Z, type=CONFIG_ADDED, ....
configClass=class org.onosproject.netconf.config.NetconfDeviceConfig
2018-05-24 14:01:43,623 | INFO | vice-installer-2 | TerminalDeviceDiscovery
TerminalDeviceDiscovery::discoverPortDetails netconf:127.0.0.1:830
2018-05-24 14:01:43,624 | ERROR | vice-installer-2 | TerminalDeviceDiscovery
org.onosproject.onos-drivers-metrohaul - 1.14.0.SNAPSHOT | Exception discoverPortDetails()
2018-05-24 14:01:43,631 | INFO | vice-installer-1 | NetconfControllerImpl
Creating NETCONF session to netconf:127.0.0.1:830 with apache-mina
*/
if (session == null) {
log.error("discoverPortDetails called with null session for {}", did());
return ImmutableList.of();
}
CompletableFuture<String> fut = session.rpc(getDeviceComponentsBuilder());
String rpcReply = fut.get();
XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(rpcReply);
xconf.setExpressionEngine(new XPathExpressionEngine());
HierarchicalConfiguration components = xconf.configurationAt("data/components");
return parsePorts(components);
} catch (Exception e) {
log.error("Exception discoverPortDetails() {}", did(), e);
return ImmutableList.of();
}
}
/**
* Parses port information from OpenConfig XML configuration.
*
* @param components the XML document with components root.
* @return List of ports
*
* //CHECKSTYLE:OFF
* <pre>{@code
* <components xmlns="http://openconfig.net/yang/platform">
* <component>....
* </component>
* <component>....
* </component>
* </components>
* }</pre>
* //CHECKSTYLE:ON
*/
protected List<PortDescription> parsePorts(HierarchicalConfiguration components) {
return components.configurationsAt("component")
.stream()
.filter(component -> {
return !component.getString("name", "unknown")
.equals("unknown") &&
component.getString("state/type", "unknown")
.equals(OC_PLATFORM_TYPES_PORT);
})
.map(component -> {
try {
// Pass the root document for cross-reference
return parsePortComponent(component, components);
} catch (Exception e) {
return null;
}
})
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
/**
* Checks if a given component has a subcomponent of a given type.
*
* @param component subtree to parse looking for subcomponents.
* @param components the full components tree, to cross-ref in
* case we need to check (sub)components' types.
*
* @return true or false
*/
private boolean hasSubComponentOfType(
HierarchicalConfiguration component,
HierarchicalConfiguration components,
String type) {
long count = component.configurationsAt("subcomponents/subcomponent")
.stream()
.filter(subcomponent -> {
String scName = subcomponent.getString("name");
StringBuilder sb = new StringBuilder("component[name='");
sb.append(scName);
sb.append("']/state/type");
String scType = components.getString(sb.toString(), "unknown");
return scType.equals(type);
})
.count();
return (count > 0);
}
/**
* Checks if a given component has a subcomponent of type OPTICAL_CHANNEL.
*
* @param component subtree to parse
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return true or false
*/
private boolean hasOpticalChannelSubComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
return hasSubComponentOfType(component, components,
OC_TRANSPORT_TYPES_OPTICAL_CHANNEL);
}
/**
* Checks if a given component has a subcomponent of type TRANSCEIVER.
*
* @param component subtree to parse
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return true or false
*/
private boolean hasTransceiverSubComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
return hasSubComponentOfType(component, components,
OC_PLATFORM_TYPES_TRANSCEIVER);
}
/**
* Parses a component XML doc into a PortDescription.
*
* @param component subtree to parse. It must be a component ot type PORT.
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return PortDescription or null if component does not have onos-index
*/
private PortDescription parsePortComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
Map<String, String> annotations = new HashMap<>();
String name = component.getString("name");
String type = component.getString("state/type");
log.info("Parsing Component {} type {}", name, type);
annotations.put(OdtnDeviceDescriptionDiscovery.OC_NAME, name);
annotations.put(OdtnDeviceDescriptionDiscovery.OC_TYPE, type);
// Store all properties as port properties
component.configurationsAt("properties/property")
.forEach(property -> {
String pn = property.getString("name");
String pv = property.getString("state/value");
annotations.put(pn, pv);
});
if (!annotations.containsKey(ONOS_PORT_INDEX)) {
log.warn("DEBUG: PORT {} does not include onos-index, skipping", name);
return null;
}
// The heuristic to know if it is client or line side
if (!annotations.containsKey(PORT_TYPE)) {
if (hasTransceiverSubComponent(component, components)) {
annotations.put(PORT_TYPE, OdtnPortType.CLIENT.value());
} else if (hasOpticalChannelSubComponent(component, components)) {
annotations.put(PORT_TYPE, OdtnPortType.LINE.value());
}
}
// Build the port
Builder builder = DefaultPortDescription.builder();
builder.withPortNumber(PortNumber.portNumber(
Long.parseLong(annotations.get(ONOS_PORT_INDEX)), name));
if (annotations.get(PORT_TYPE)
.equals(OdtnPortType.CLIENT.value())) {
log.info("Adding CLIENT port");
builder.type(Type.PACKET);
} else if (annotations.get(PORT_TYPE)
.equals(OdtnPortType.LINE.value())) {
log.info("Adding LINE port");
builder.type(Type.OCH);
} else {
log.info("Unknown port added as CLIENT port");
}
builder.annotations(DefaultAnnotations.builder().putAll(annotations).build());
return builder.build();
}
}
| drivers/odtn-driver/src/main/java/org/onosproject/drivers/odtn/openconfig/TerminalDeviceDiscovery.java | /*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This work was partially supported by EC H2020 project METRO-HAUL (761727).
*/
package org.onosproject.drivers.odtn.openconfig;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.concurrent.CompletableFuture;
import org.onlab.packet.ChassisId;
import org.apache.commons.configuration.HierarchicalConfiguration;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.commons.configuration.tree.xpath.XPathExpressionEngine;
import org.onosproject.drivers.utilities.XmlConfigParser;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.device.DeviceDescription;
import org.onosproject.net.device.DeviceDescriptionDiscovery;
import org.onosproject.net.device.DefaultDeviceDescription;
import org.onosproject.net.device.DefaultPortDescription;
import org.onosproject.net.device.DefaultPortDescription.Builder;
import org.onosproject.net.device.PortDescription;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.SparseAnnotations;
import org.onosproject.net.Port.Type;
import org.onosproject.net.PortNumber;
import org.onosproject.netconf.NetconfController;
import org.onosproject.netconf.NetconfDevice;
import org.onosproject.netconf.NetconfException;
import org.onosproject.netconf.NetconfSession;
import com.google.common.collect.ImmutableList;
import org.onosproject.odtn.behaviour.OdtnDeviceDescriptionDiscovery;
/**
 * Driver Implementation of the DeviceDescription discovery for OpenConfig
* terminal devices.
*
*/
public class TerminalDeviceDiscovery
extends AbstractHandlerBehaviour
implements OdtnDeviceDescriptionDiscovery, DeviceDescriptionDiscovery {
private static final String RPC_TAG_NETCONF_BASE =
"<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">";
private static final String RPC_CLOSE_TAG = "</rpc>";
private static final String OC_PLATFORM_TYPES_TRANSCEIVER =
"oc-platform-types:TRANSCEIVER";
private static final String OC_PLATFORM_TYPES_PORT =
"oc-platform-types:PORT";
private static final String OC_TRANSPORT_TYPES_OPTICAL_CHANNEL =
"oc-opt-types:OPTICAL_CHANNEL";
private static final Logger log = getLogger(TerminalDeviceDiscovery.class);
/**
* Returns the NetconfSession with the device for which the method was called.
*
     * @param deviceId device identifier
*
* @return The netconf session or null
*/
private NetconfSession getNetconfSession(DeviceId deviceId) {
NetconfController controller = handler().get(NetconfController.class);
NetconfDevice ncdev = controller.getDevicesMap().get(deviceId);
if (ncdev == null) {
log.trace("No netconf device, returning null session");
return null;
}
return ncdev.getSession();
}
/**
* Get the deviceId for which the methods apply.
*
* @return The deviceId as contained in the handler data
*/
private DeviceId did() {
return handler().data().deviceId();
}
/**
* Get the device instance for which the methods apply.
*
* @return The device instance
*/
private Device getDevice() {
DeviceService deviceService = checkNotNull(handler().get(DeviceService.class));
Device device = deviceService.getDevice(did());
return device;
}
/**
* Construct a String with a Netconf filtered get RPC Message.
*
* @param filter A valid XML tree with the filter to apply in the get
* @return a String containing the RPC XML Document
*/
private String filteredGetBuilder(String filter) {
StringBuilder rpc = new StringBuilder(RPC_TAG_NETCONF_BASE);
rpc.append("<get>");
rpc.append("<filter type='subtree'>");
rpc.append(filter);
rpc.append("</filter>");
rpc.append("</get>");
rpc.append(RPC_CLOSE_TAG);
return rpc.toString();
}
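    /*
     * Example: because the builder simply concatenates the literals above,
     *   filteredGetBuilder("<components xmlns='http://openconfig.net/yang/platform'/>")
     * produces (whitespace added for readability):
     *   <rpc xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
     *     <get>
     *       <filter type='subtree'>
     *         <components xmlns='http://openconfig.net/yang/platform'/>
     *       </filter>
     *     </get>
     *   </rpc>
     */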
/**
* Construct a String with a Netconf filtered get RPC Message.
*
* @param filter A valid XPath Expression with the filter to apply in the get
* @return a String containing the RPC XML Document
*
     * Note: the server must support the xpath capability, e.g.
     * <filter type="xpath"
     *  select="/components/component[name='PORT-A-In-1']/properties/property[name='onos-index']/config/value"/>
*/
private String xpathFilteredGetBuilder(String filter) {
StringBuilder rpc = new StringBuilder(RPC_TAG_NETCONF_BASE);
rpc.append("<get>");
rpc.append("<filter type='xpath' select=\"");
rpc.append(filter);
rpc.append("\"/>");
rpc.append("</get>");
rpc.append(RPC_CLOSE_TAG);
return rpc.toString();
}
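    /*
     * Example: xpathFilteredGetBuilder(
     *     "/components/component[name='PORT-A-In-1']/properties/property[name='onos-index']/config/value")
     * produces the same <rpc><get> envelope as above, but with
     *   <filter type='xpath' select="/components/component[name='PORT-A-In-1']/properties/property[name='onos-index']/config/value"/>
     * as the only child of <get>.
     */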
/**
* Builds a request to get Device details, operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/component/state/type being oc-platform-types:OPERATING_SYSTEM
*/
private String getDeviceDetailsBuilder() {
StringBuilder filter = new StringBuilder();
filter.append("<components xmlns='http://openconfig.net/yang/platform'>");
filter.append(" <component>");
filter.append(" <state>");
filter.append(" <type xmlns:oc-platform-types='http://openconfig.net/");
filter.append("yang/platform-types'>oc-platform-types:OPERATING_SYSTEM</type>");
filter.append(" </state>");
filter.append(" </component>");
filter.append("</components>");
return filteredGetBuilder(filter.toString());
/* I am not sure the alternative method is more efficient
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
Document doc = db.newDocument();
Element rpc = doc.createElementNS("urn:ietf:params:xml:ns:netconf:base:1.0", "rpc");
Element get = doc.createElement("get");
Element rpc = doc.createElement("rpc");
Element components = doc.createElementNS("http://openconfig.net/yang/platform", "components");
Element component = doc.createElement("component");
Element state = doc.createElement("state");
Element type = doc.createElement("type");
type.setAttributeNS("http://www.w3.org/2000/xmlns/",
"xmlns:oc-platform-types", "http://openconfig.net/yang/platform-types");
type.appendChild(doc.createTextNode("oc-platform-types:OPERATING_SYSTEM"));
state.appendChild(type);
component.appendChild(state);
components.appendChild(component);
rpc.appendChild(components);
get.appendChild(rpc);
rpc.appendChild(get);
doc.appendChild(rpc);
return NetconfRpcParserUtil.toString(doc);
} catch (Exception e) {
throw new RuntimeException(new NetconfException("Exception in getDeviceDetailsBuilder", e));
}
*/
}
/**
* Builds a request to get Device Components, config and operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/
*/
private String getDeviceComponentsBuilder() {
return filteredGetBuilder("<components xmlns='http://openconfig.net/yang/platform'/>");
}
/**
* Builds a request to get Device Ports, config and operational data.
*
     * @return A string with the Netconf RPC for a get with subtree filtering based on
* /components/component/state/type being oc-platform-types:PORT
*/
private String getDevicePortsBuilder() {
StringBuilder rpc = new StringBuilder();
rpc.append("<components xmlns='http://openconfig.net/yang/platform'>");
rpc.append(" <component><state>");
rpc.append(" <type xmlns:oc-platform-types='http://openconfig.net/");
rpc.append("yang/platform-types'>oc-platform-types:PORT</type>");
rpc.append(" </state></component>");
rpc.append("</components>");
return filteredGetBuilder(rpc.toString());
}
/**
* Returns a DeviceDescription with Device info.
*
* @return DeviceDescription or null
*
* //CHECKSTYLE:OFF
* <pre>{@code
* <data>
* <components xmlns="http://openconfig.net/yang/platform">
* <component>
* <state>
* <name>FIRMWARE</name>
* <type>oc-platform-types:OPERATING_SYSTEM</type>
* <description>CTTC METRO-HAUL Emulated OpenConfig TerminalDevice</description>
* <version>0.0.1</version>
* </state>
* </component>
* </components>
* </data>
*}</pre>
* //CHECKSTYLE:ON
*/
@Override
public DeviceDescription discoverDeviceDetails() {
log.info("TerminalDeviceDiscovery::discoverDeviceDetails device {}", did());
boolean defaultAvailable = true;
SparseAnnotations annotations = DefaultAnnotations.builder().build();
// Other option "OTHER", we use ROADM for now
org.onosproject.net.Device.Type type =
org.onosproject.net.Device.Type.ROADM;
// Some defaults
String vendor = "NOVENDOR";
String hwVersion = "0.1.1";
String swVersion = "0.1.1";
String serialNumber = "0xCAFEBEEF";
String chassisId = "128";
// Get the session,
NetconfSession session = getNetconfSession(did());
if (session != null) {
try {
String reply = session.get(getDeviceDetailsBuilder());
// <rpc-reply> as root node
XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(reply);
vendor = xconf.getString("data/components/component/state/mfg-name", vendor);
serialNumber = xconf.getString("data/components/component/state/serial-no", serialNumber);
// Requires OpenConfig >= 2018
swVersion = xconf.getString("data/components/component/state/software-version", swVersion);
hwVersion = xconf.getString("data/components/component/state/hardware-version", hwVersion);
} catch (Exception e) {
throw new RuntimeException(new NetconfException("Failed to retrieve version info.", e));
}
} else {
log.info("TerminalDeviceDiscovery::discoverDeviceDetails - No netconf session for {}", did());
}
log.info("VENDOR {}", vendor);
log.info("HWVERSION {}", hwVersion);
log.info("SWVERSION {}", swVersion);
log.info("SERIAL {}", serialNumber);
log.info("CHASSISID {}", chassisId);
ChassisId cid = new ChassisId(Long.valueOf(chassisId, 10));
return new DefaultDeviceDescription(did().uri(),
type, vendor, hwVersion, swVersion, serialNumber,
cid, defaultAvailable, annotations);
}
/**
* Returns a list of PortDescriptions for the device.
*
* @return a list of descriptions.
*
     * The RPC reply has the following pattern:
* //CHECKSTYLE:OFF
* <pre>{@code
* <?xml version="1.0" encoding="UTF-8"?>
* <rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="7">
* <data>
* <components xmlns="http://openconfig.net/yang/platform">
* <component>....
* </component>
* <component>....
* </component>
* </components>
* </data>
* </rpc-reply>
* }</pre>
* //CHECKSTYLE:ON
*/
@Override
public List<PortDescription> discoverPortDetails() {
try {
NetconfSession session = getNetconfSession(did());
/*
Note: the method may get called before the netconf session is established
2018-05-24 14:01:43,607 | INFO
event NetworkConfigEvent{time=2018-05-24T14:01:43.602Z, type=CONFIG_ADDED, ....
configClass=class org.onosproject.netconf.config.NetconfDeviceConfig
2018-05-24 14:01:43,623 | INFO | vice-installer-2 | TerminalDeviceDiscovery
TerminalDeviceDiscovery::discoverPortDetails netconf:127.0.0.1:830
2018-05-24 14:01:43,624 | ERROR | vice-installer-2 | TerminalDeviceDiscovery
org.onosproject.onos-drivers-metrohaul - 1.14.0.SNAPSHOT | Exception discoverPortDetails()
2018-05-24 14:01:43,631 | INFO | vice-installer-1 | NetconfControllerImpl
Creating NETCONF session to netconf:127.0.0.1:830 with apache-mina
*/
if (session == null) {
log.error("discoverPortDetails called with null session for {}", did());
return ImmutableList.of();
}
CompletableFuture<String> fut = session.rpc(getDeviceComponentsBuilder());
String rpcReply = fut.get();
XMLConfiguration xconf = (XMLConfiguration) XmlConfigParser.loadXmlString(rpcReply);
xconf.setExpressionEngine(new XPathExpressionEngine());
HierarchicalConfiguration components = xconf.configurationAt("data/components");
return parsePorts(components);
} catch (Exception e) {
log.error("Exception discoverPortDetails() {}", did(), e);
return ImmutableList.of();
}
}
/**
* Parses port information from OpenConfig XML configuration.
*
* @param components the XML document with components root.
* @return List of ports
*
* //CHECKSTYLE:OFF
* <pre>{@code
* <components xmlns="http://openconfig.net/yang/platform">
* <component>....
* </component>
* <component>....
* </component>
* </components>
* }</pre>
* //CHECKSTYLE:ON
*/
protected List<PortDescription> parsePorts(HierarchicalConfiguration components) {
return components.configurationsAt("component")
.stream()
.filter(component -> {
return !component.getString("name", "unknown")
.equals("unknown") &&
component.getString("state/type", "unknown")
.equals(OC_PLATFORM_TYPES_PORT);
})
.map(component -> {
try {
// Pass the root document for cross-reference
return parsePortComponent(component, components);
} catch (Exception e) {
return null;
}
})
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
/**
* Checks if a given component has a subcomponent of a given type.
*
* @param component subtree to parse looking for subcomponents.
* @param components the full components tree, to cross-ref in
* case we need to check (sub)components' types.
*
* @return true or false
*/
private boolean hasSubComponentOfType(
HierarchicalConfiguration component,
HierarchicalConfiguration components,
String type) {
long count = component.configurationsAt("subcomponents/subcomponent")
.stream()
.filter(subcomponent -> {
String scName = subcomponent.getString("name");
StringBuilder sb = new StringBuilder("component[name='");
sb.append(scName);
sb.append("']/state/type");
String scType = components.getString(sb.toString(), "unknown");
return scType.equals(type);
})
.count();
return (count > 0);
}
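    /*
     * Example: for <subcomponent><name>TRANSCEIVER-1</name></subcomponent> the key built
     * above is "component[name='TRANSCEIVER-1']/state/type"; it is resolved against the
     * full components tree (an XPathExpressionEngine is installed on the parsed reply in
     * discoverPortDetails) and the resulting type string is compared with the requested one.
     */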
/**
* Checks if a given component has a subcomponent of type OPTICAL_CHANNEL.
*
* @param component subtree to parse
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return true or false
*/
private boolean hasOpticalChannelSubComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
return hasSubComponentOfType(component, components,
OC_TRANSPORT_TYPES_OPTICAL_CHANNEL);
}
/**
* Checks if a given component has a subcomponent of type TRANSCEIVER.
*
* @param component subtree to parse
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return true or false
*/
private boolean hasTransceiverSubComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
return hasSubComponentOfType(component, components,
OC_PLATFORM_TYPES_TRANSCEIVER);
}
/**
* Parses a component XML doc into a PortDescription.
*
     * @param component subtree to parse. It must be a component of type PORT.
* @param components the full components tree, to cross-ref in
* case we need to check transceivers or optical channels.
*
* @return PortDescription or null if component does not have onos-index
*/
private PortDescription parsePortComponent(
HierarchicalConfiguration component,
HierarchicalConfiguration components) {
Map<String, String> annotations = new HashMap<>();
String name = component.getString("name");
String type = component.getString("state/type");
log.info("Parsing Component {} type {}", name, type);
annotations.put(OdtnDeviceDescriptionDiscovery.OC_NAME, name);
annotations.put(OdtnDeviceDescriptionDiscovery.OC_TYPE, type);
// Store all properties as port properties
component.configurationsAt("properties/property")
.forEach(property -> {
String pn = property.getString("name");
String pv = property.getString("state/value");
annotations.put(pn, pv);
});
if (!annotations.containsKey(ONOS_PORT_INDEX)) {
log.warn("DEBUG: PORT {} does not include onos-index, skipping", name);
return null;
}
// The heuristic to know if it is client or line side
if (!annotations.containsKey(PORT_TYPE)) {
if (hasTransceiverSubComponent(component, components)) {
annotations.put(PORT_TYPE, OdtnPortType.CLIENT.value());
} else if (hasOpticalChannelSubComponent(component, components)) {
annotations.put(PORT_TYPE, OdtnPortType.LINE.value());
}
}
// Build the port
Builder builder = DefaultPortDescription.builder();
builder.withPortNumber(PortNumber.portNumber(
Long.parseLong(annotations.get(ONOS_PORT_INDEX)), name));
        // Compare constant-first so a port whose type could not be inferred above does not NPE
        if (OdtnPortType.CLIENT.value().equals(annotations.get(PORT_TYPE))) {
            log.info("Adding CLIENT port");
            builder.type(Type.PACKET);
        } else if (OdtnPortType.LINE.value().equals(annotations.get(PORT_TYPE))) {
            log.info("Adding LINE port");
            builder.type(Type.OCH);
        } else {
            log.info("Unknown port added as CLIENT port");
}
builder.annotations(DefaultAnnotations.builder().putAll(annotations).build());
return builder.build();
}
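    /*
     * Illustration: a component of this shape is mapped to a CLIENT (packet) port,
     * assuming the tree also contains a component named TRANSCEIVER-1 whose state/type
     * is oc-platform-types:TRANSCEIVER:
     *   <component>
     *     <name>PORT-A-In-1</name>
     *     <state><type>oc-platform-types:PORT</type></state>
     *     <properties>
     *       <property><name>onos-index</name><state><value>1</value></state></property>
     *     </properties>
     *     <subcomponents><subcomponent><name>TRANSCEIVER-1</name></subcomponent></subcomponents>
     *   </component>
     * The resulting PortDescription carries port number 1, the port name and the oc name/type
     * plus all listed properties as annotations.
     */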
}
| SONAR fix - don't use the generic RuntimeException
Change-Id: Ibe65b2489f5962c2878350265ec76c7eaae70ce6
| drivers/odtn-driver/src/main/java/org/onosproject/drivers/odtn/openconfig/TerminalDeviceDiscovery.java | SONAR fix - don't use the generic RuntimeException | <ide><path>rivers/odtn-driver/src/main/java/org/onosproject/drivers/odtn/openconfig/TerminalDeviceDiscovery.java
<ide> swVersion = xconf.getString("data/components/component/state/software-version", swVersion);
<ide> hwVersion = xconf.getString("data/components/component/state/hardware-version", hwVersion);
<ide> } catch (Exception e) {
<del> throw new RuntimeException(new NetconfException("Failed to retrieve version info.", e));
<add> throw new IllegalStateException(new NetconfException("Failed to retrieve version info.", e));
<ide> }
<ide> } else {
<ide> log.info("TerminalDeviceDiscovery::discoverDeviceDetails - No netconf session for {}", did()); |
|
Java | apache-2.0 | a77422979aebd904578d979c37f6f5f3dcfcab2b | 0 | sunny256/crate,puneetjaiswal/crate,EvilMcJerkface/crate,adrpar/crate,crate/crate,puneetjaiswal/crate,crate/crate,aslanbekirov/crate,husky-koglhof/crate,gmrodrigues/crate,gmrodrigues/crate,aslanbekirov/crate,husky-koglhof/crate,adrpar/crate,crate/crate,aslanbekirov/crate,EvilMcJerkface/crate,puneetjaiswal/crate,EvilMcJerkface/crate,husky-koglhof/crate,adrpar/crate,gmrodrigues/crate,sunny256/crate,sunny256/crate | package io.crate.planner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.crate.analyze.Analysis;
import io.crate.analyze.Analyzer;
import io.crate.analyze.WhereClause;
import io.crate.metadata.MetaDataModule;
import io.crate.metadata.Routing;
import io.crate.metadata.TableIdent;
import io.crate.metadata.doc.DocSchemaInfo;
import io.crate.metadata.sys.MetaDataSysModule;
import io.crate.metadata.sys.SysClusterTableInfo;
import io.crate.metadata.sys.SysNodesTableInfo;
import io.crate.metadata.sys.SysShardsTableInfo;
import io.crate.metadata.table.SchemaInfo;
import io.crate.metadata.table.TableInfo;
import io.crate.metadata.table.TestingTableInfo;
import io.crate.operator.aggregation.impl.AggregationImplModule;
import io.crate.operator.operator.OperatorModule;
import io.crate.operator.scalar.ScalarFunctionModule;
import io.crate.planner.node.*;
import io.crate.planner.projection.AggregationProjection;
import io.crate.planner.projection.GroupProjection;
import io.crate.planner.projection.Projection;
import io.crate.planner.projection.TopNProjection;
import io.crate.planner.symbol.*;
import io.crate.sql.parser.SqlParser;
import io.crate.sql.tree.Statement;
import org.cratedb.DataType;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.junit.Before;
import org.junit.Test;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import static junit.framework.Assert.assertTrue;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class PlannerTest {
static {
ClassLoader.getSystemClassLoader().setDefaultAssertionStatus(true);
}
private Injector injector;
private Analyzer analyzer;
private Planner planner = new Planner();
Routing shardRouting = new Routing(ImmutableMap.<String, Map<String, Set<Integer>>>builder()
.put("nodeOne", ImmutableMap.<String, Set<Integer>>of("t1", ImmutableSet.of(1, 2)))
.put("nodeTow", ImmutableMap.<String, Set<Integer>>of("t1", ImmutableSet.of(3, 4)))
.build());
Routing nodesRouting = new Routing(ImmutableMap.<String, Map<String, Set<Integer>>>builder()
.put("nodeOne", ImmutableMap.<String, Set<Integer>>of())
.put("nodeTwo", ImmutableMap.<String, Set<Integer>>of())
.build());
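    // Both Routing maps follow node name -> (index name -> shard ids): shardRouting spreads
    // shards 1-4 of "t1" over two nodes, nodesRouting lists two nodes without any shards.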
class TestClusterTableInfo extends SysClusterTableInfo {
        // granularity < DOC is already handled differently
// here we want a table with handlerSideRouting and DOC granularity.
@Override
public RowGranularity rowGranularity() {
return RowGranularity.DOC;
}
}
class TestShardsTableInfo extends SysShardsTableInfo {
public TestShardsTableInfo() {
super(null);
}
@Override
public Routing getRouting(WhereClause whereClause) {
return shardRouting;
}
}
class TestNodesTableInfo extends SysNodesTableInfo {
public TestNodesTableInfo() {
super(null);
}
@Override
public Routing getRouting(WhereClause whereClause) {
return nodesRouting;
}
}
class TestSysModule extends MetaDataSysModule {
@Override
protected void bindTableInfos() {
tableInfoBinder.addBinding(TestNodesTableInfo.IDENT.name()).toInstance(
new TestNodesTableInfo());
tableInfoBinder.addBinding(TestShardsTableInfo.IDENT.name()).toInstance(
new TestShardsTableInfo());
tableInfoBinder.addBinding(TestClusterTableInfo.IDENT.name()).toInstance(
new TestClusterTableInfo());
}
}
class TestModule extends MetaDataModule {
@Override
protected void configure() {
ClusterService clusterService = mock(ClusterService.class);
bind(ClusterService.class).toInstance(clusterService);
super.configure();
}
@Override
protected void bindSchemas() {
super.bindSchemas();
SchemaInfo schemaInfo = mock(SchemaInfo.class);
TableIdent userTableIdent = new TableIdent(null, "users");
TableInfo userTableInfo = TestingTableInfo.builder(userTableIdent, RowGranularity.DOC, shardRouting)
.add("name", DataType.STRING, null)
.add("id", DataType.LONG, null)
.addPrimaryKey("id")
.build();
TableIdent charactersTableIdent = new TableIdent(null, "characters");
TableInfo charactersTableInfo = TestingTableInfo.builder(charactersTableIdent, RowGranularity.DOC, shardRouting)
.add("name", DataType.STRING, null)
.add("id", DataType.STRING, null)
.addPrimaryKey("id")
.build();
when(schemaInfo.getTableInfo(charactersTableIdent.name())).thenReturn(charactersTableInfo);
when(schemaInfo.getTableInfo(userTableIdent.name())).thenReturn(userTableInfo);
schemaBinder.addBinding(DocSchemaInfo.NAME).toInstance(schemaInfo);
}
}
@Before
public void setUp() throws Exception {
injector = new ModulesBuilder()
.add(new TestModule())
.add(new TestSysModule())
.add(new AggregationImplModule())
.add(new ScalarFunctionModule())
.add(new OperatorModule())
.createInjector();
analyzer = injector.getInstance(Analyzer.class);
}
private Plan plan(String statement) {
return planner.plan(analyzer.analyze(SqlParser.createStatement(statement)));
}
@Test
public void testGroupByWithAggregationStringLiteralArguments() {
Plan plan = plan("select count('foo'), name from users group by name");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode) iterator.next();
// TODO: optimize to not collect literal
//assertThat(collectNode.toCollect().size(), is(1));
GroupProjection groupProjection = (GroupProjection) collectNode.projections().get(0);
Aggregation aggregation = groupProjection.values().get(0);
//assertTrue(aggregation.inputs().get(0).symbolType().isLiteral());
}
@Test
public void testGroupByWithAggregationPlan() throws Exception {
Plan plan = plan("select count(*), name from users group by name");
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
// distributed collect
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.downStreamNodes().size(), is(2));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.DOC));
assertThat(collectNode.executionNodes().size(), is(2));
assertThat(collectNode.toCollect().size(), is(1));
assertThat(collectNode.projections().size(), is(1));
assertThat(collectNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(collectNode.outputTypes().size(), is(2));
assertThat(collectNode.outputTypes().get(0), is(DataType.STRING));
assertThat(collectNode.outputTypes().get(1), is(DataType.NULL));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.numUpstreams(), is(2));
assertThat(mergeNode.executionNodes().size(), is(2));
assertEquals(mergeNode.inputTypes(), collectNode.outputTypes());
assertThat(mergeNode.projections().size(), is(1));
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
GroupProjection groupProjection = (GroupProjection) mergeNode.projections().get(0);
InputColumn inputColumn = (InputColumn) groupProjection.values().get(0).inputs().get(0);
assertThat(inputColumn.index(), is(1));
assertThat(mergeNode.outputTypes().size(), is(2));
assertThat(mergeNode.outputTypes().get(0), is(DataType.STRING));
assertThat(mergeNode.outputTypes().get(1), is(DataType.LONG));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode localMerge = (MergeNode) planNode;
assertThat(localMerge.numUpstreams(), is(2));
assertTrue(localMerge.executionNodes().isEmpty());
assertEquals(mergeNode.outputTypes(), localMerge.inputTypes());
assertThat(localMerge.projections().get(0), instanceOf(TopNProjection.class));
TopNProjection topN = (TopNProjection) localMerge.projections().get(0);
assertThat(topN.outputs().size(), is(2));
// groupProjection changes output to keys, aggregations
// topN needs to swap the outputs back
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(1));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(0));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGetPlan() throws Exception {
Plan plan = plan("select name from users where id = 1");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.ids().get(0), is("1"));
assertFalse(iterator.hasNext());
assertThat(node.outputs().size(), is(1));
}
@Test
public void testGetPlanStringLiteral() throws Exception {
Plan plan = plan("select name from characters where id = 'one'");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("characters"));
assertThat(node.ids().get(0), is("one"));
assertFalse(iterator.hasNext());
assertThat(node.outputs().size(), is(1));
}
@Test
public void testMultiGetPlan() throws Exception {
Plan plan = plan("select name from users where id in (1, 2)");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.ids().size(), is(2));
assertThat(node.ids().get(0), is("1"));
assertThat(node.ids().get(1), is("2"));
}
@Test
public void testDeletePlan() throws Exception {
Plan plan = plan("delete from users where id = 1");
Iterator<PlanNode> iterator = plan.iterator();
ESDeleteNode node = (ESDeleteNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.id(), is("1"));
assertFalse(iterator.hasNext());
}
@Test
public void testMultiDeletePlan() throws Exception {
Plan plan = plan("delete from users where id in (1, 2)");
Iterator<PlanNode> iterator = plan.iterator();
assertThat(iterator.next(), instanceOf(ESDeleteByQueryNode.class));
}
@Test
public void testGroupByWithAggregationAndLimit() throws Exception {
Plan plan = plan("select count(*), name from users group by name limit 1 offset 1");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
planNode = iterator.next();
// distributed merge
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(mergeNode.projections().get(1), instanceOf(TopNProjection.class));
// limit must include offset because the real limit can only be applied on the handler
// after all rows have been gathered.
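        // e.g. with "limit 1 offset 1" the distributed TopN gets limit 2 (= limit + offset)
        // and offset 0; the real limit and offset are applied in the local merge further down.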
TopNProjection topN = (TopNProjection) mergeNode.projections().get(1);
assertThat(topN.limit(), is(2));
assertThat(topN.offset(), is(0));
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(1));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(0));
// local merge
planNode = iterator.next();
assertThat(planNode.projections().get(0), instanceOf(TopNProjection.class));
topN = (TopNProjection) planNode.projections().get(0);
assertThat(topN.limit(), is(1));
assertThat(topN.offset(), is(1));
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(0));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(1));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGlobalAggregationPlan() throws Exception {
String statementString = "select count(name) from users";
Statement statement = SqlParser.createStatement(statementString);
Analysis analysis = analyzer.analyze(statement);
Plan plan = planner.plan(analysis);
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.outputTypes().get(0), is(DataType.NULL));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.DOC));
assertThat(collectNode.projections().size(), is(1));
assertThat(collectNode.projections().get(0), instanceOf(AggregationProjection.class));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.inputTypes().get(0), is(DataType.NULL));
assertThat(mergeNode.outputTypes().get(0), is(DataType.LONG));
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGroupByOnNodeLevel() throws Exception {
Plan plan = plan("select count(*), name from sys.nodes group by name");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode) iterator.next();
assertFalse(collectNode.hasDownstreams());
assertThat(collectNode.outputTypes().get(0), is(DataType.STRING));
assertThat(collectNode.outputTypes().get(1), is(DataType.NULL));
MergeNode mergeNode = (MergeNode) iterator.next();
assertThat(mergeNode.numUpstreams(), is(2));
assertThat(mergeNode.projections().size(), is(2));
assertThat(mergeNode.outputTypes().get(0), is(DataType.LONG));
assertThat(mergeNode.outputTypes().get(1), is(DataType.STRING));
GroupProjection groupProjection = (GroupProjection) mergeNode.projections().get(0);
assertThat(groupProjection.keys().size(), is(1));
assertThat(((InputColumn) groupProjection.outputs().get(0)).index(), is(0));
assertThat(groupProjection.outputs().get(1), is(instanceOf(Aggregation.class)));
assertThat(((Aggregation)groupProjection.outputs().get(1)).functionIdent().name(), is("count"));
assertThat(((Aggregation)groupProjection.outputs().get(1)).fromStep(), is(Aggregation.Step.PARTIAL));
assertThat(((Aggregation)groupProjection.outputs().get(1)).toStep(), is(Aggregation.Step.FINAL));
TopNProjection projection = (TopNProjection) mergeNode.projections().get(1);
assertThat(((InputColumn) projection.outputs().get(0)).index(), is(1));
assertThat(((InputColumn) projection.outputs().get(1)).index(), is(0));
assertFalse(iterator.hasNext());
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testShardPlan() throws Exception {
Plan plan = plan("select id from sys.shards order by id limit 10");
// TODO: add where clause
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.outputTypes().get(0), is(DataType.INTEGER));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.SHARD));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.inputTypes().size(), is(1));
assertThat(mergeNode.inputTypes().get(0), is(DataType.INTEGER));
assertThat(mergeNode.outputTypes().size(), is(1));
assertThat(mergeNode.outputTypes().get(0), is(DataType.INTEGER));
assertThat(mergeNode.numUpstreams(), is(2));
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testESSearchPlan() throws Exception {
Plan plan = plan("select name from users where name = 'x' order by id limit 10");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESSearchNode.class));
ESSearchNode searchNode = (ESSearchNode) planNode;
assertThat(searchNode.outputTypes().size(), is(1));
assertThat(searchNode.outputTypes().get(0), is(DataType.STRING));
assertTrue(searchNode.whereClause().hasQuery());
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testESIndexPlan() throws Exception {
Plan plan = plan("insert into users (id, name) values (42, 'Deep Thought')");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESIndexNode.class));
ESIndexNode indexNode = (ESIndexNode) planNode;
assertThat(indexNode.columns().size(), is(2));
assertThat(indexNode.columns().get(0).valueType(), is(DataType.LONG));
assertThat(indexNode.columns().get(0).info().ident().columnIdent().name(), is("id"));
assertThat(indexNode.columns().get(1).valueType(), is(DataType.STRING));
assertThat(indexNode.columns().get(1).info().ident().columnIdent().name(), is("name"));
assertThat(indexNode.valuesLists().size(), is(1));
assertThat(((LongLiteral) indexNode.valuesLists().get(0).get(0)).value(), is(42l));
assertThat(((StringLiteral) indexNode.valuesLists().get(0).get(1)).value().utf8ToString(), is("Deep Thought"));
assertThat(indexNode.outputTypes().size(), is(1));
assertThat(indexNode.outputTypes().get(0), is(DataType.LONG));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testESIndexPlanMultipleValues() throws Exception {
Plan plan = plan("insert into users (id, name) values (42, 'Deep Thought'), (99, 'Marvin')");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESIndexNode.class));
ESIndexNode indexNode = (ESIndexNode) planNode;
assertThat(indexNode.valuesLists().size(), is(2));
assertThat(((LongLiteral) indexNode.valuesLists().get(0).get(0)).value(), is(42l));
assertThat(((StringLiteral) indexNode.valuesLists().get(0).get(1)).value().utf8ToString(), is("Deep Thought"));
assertThat(((LongLiteral) indexNode.valuesLists().get(1).get(0)).value(), is(99l));
assertThat(((StringLiteral) indexNode.valuesLists().get(1).get(1)).value().utf8ToString(), is("Marvin"));
assertThat(indexNode.outputTypes().size(), is(1));
assertThat(indexNode.outputTypes().get(0), is(DataType.LONG));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testCountDistinctPlan() throws Exception {
Plan plan = plan("select count(distinct name) from users");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode)iterator.next();
Projection projection = collectNode.projections().get(0);
assertThat(projection, instanceOf(AggregationProjection.class));
AggregationProjection aggregationProjection = (AggregationProjection)projection;
assertThat(aggregationProjection.aggregations().size(), is(1));
Aggregation aggregation = aggregationProjection.aggregations().get(0);
assertThat(aggregation.toStep(), is(Aggregation.Step.PARTIAL));
Symbol aggregationInput = aggregation.inputs().get(0);
assertThat(aggregationInput.symbolType(), is(SymbolType.INPUT_COLUMN));
assertThat(collectNode.toCollect().get(0), instanceOf(Reference.class));
assertThat(((Reference)collectNode.toCollect().get(0)).info().ident().columnIdent().name(), is("name"));
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(2));
Projection projection1 = mergeNode.projections().get(1);
assertThat(projection1, instanceOf(TopNProjection.class));
Symbol collection_count = projection1.outputs().get(0);
assertThat(collection_count, instanceOf(Function.class));
}
@Test
public void testGroupByWithOrderOnAggregate() throws Exception {
Plan plan = plan("select count(*), name from users group by name order by count(*)");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode)iterator.next();
// reducer
iterator.next();
// sort is on handler because there is no limit/offset
// handler
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(1));
TopNProjection topNProjection = (TopNProjection)mergeNode.projections().get(0);
Symbol orderBy = topNProjection.orderBy().get(0);
assertThat(orderBy, instanceOf(InputColumn.class));
// points to the first values() entry of the previous GroupProjection
assertThat(((InputColumn) orderBy).index(), is(1));
}
@Test
public void testHandlerSideRouting() throws Exception {
Plan plan = plan("select * from sys.cluster");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
        // just testing the dispatching here, making sure it is not an ESSearchNode
assertThat(planNode, instanceOf(CollectNode.class));
}
@Test
public void testHandlerSideRoutingGroupBy() throws Exception {
Plan plan = plan("select count(*) from sys.cluster group by name");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
        // just testing the dispatching here, making sure it is not an ESSearchNode
assertThat(planNode, instanceOf(CollectNode.class));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
// no distributed merge, only 1 mergeNode
assertFalse(iterator.hasNext());
}
@Test
public void testCountDistinctWithGroupBy() throws Exception {
Plan plan = plan("select count(distinct id), name from users group by name order by count(distinct id)");
Iterator<PlanNode> iterator = plan.iterator();
// collect
CollectNode collectNode = (CollectNode)iterator.next();
assertThat(collectNode.toCollect().get(0), instanceOf(Reference.class));
assertThat(collectNode.toCollect().size(), is(2));
assertThat(((Reference)collectNode.toCollect().get(1)).info().ident().columnIdent().name(), is("id"));
assertThat(((Reference)collectNode.toCollect().get(0)).info().ident().columnIdent().name(), is("name"));
Projection projection = collectNode.projections().get(0);
assertThat(projection, instanceOf(GroupProjection.class));
GroupProjection groupProjection = (GroupProjection)projection;
Symbol groupKey = groupProjection.keys().get(0);
assertThat(groupKey, instanceOf(InputColumn.class));
assertThat(((InputColumn)groupKey).index(), is(0));
assertThat(groupProjection.values().size(), is(1));
Aggregation aggregation = groupProjection.values().get(0);
assertThat(aggregation.toStep(), is(Aggregation.Step.PARTIAL));
Symbol aggregationInput = aggregation.inputs().get(0);
assertThat(aggregationInput.symbolType(), is(SymbolType.INPUT_COLUMN));
// reducer
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(2));
Projection groupProjection1 = mergeNode.projections().get(0);
assertThat(groupProjection1, instanceOf(GroupProjection.class));
groupProjection = (GroupProjection)groupProjection1;
assertThat(groupProjection.keys().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn)groupProjection.keys().get(0)).index(), is(0));
assertThat(groupProjection.values().get(0), instanceOf(Aggregation.class));
Aggregation aggregationStep2 = groupProjection.values().get(0);
assertThat(aggregationStep2.toStep(), is(Aggregation.Step.FINAL));
TopNProjection topNProjection = (TopNProjection)mergeNode.projections().get(1);
Symbol collection_count = topNProjection.outputs().get(0);
assertThat(collection_count, instanceOf(Function.class));
// handler
MergeNode localMergeNode = (MergeNode)iterator.next();
assertThat(localMergeNode.projections().size(), is(1));
Projection localTopN = localMergeNode.projections().get(0);
assertThat(localTopN, instanceOf(TopNProjection.class));
}
@Test
public void testESUpdatePlan() throws Exception {
Plan plan = plan("update users set name='Vogon lyric fan' where id=1");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESUpdateNode.class));
ESUpdateNode updateNode = (ESUpdateNode)planNode;
assertThat(updateNode.index(), is("users"));
assertThat(updateNode.primaryKeyValues().length, is(1));
assertThat(updateNode.primaryKeyValues()[0], is("1"));
assertThat(updateNode.outputTypes().size(), is(1));
assertThat(updateNode.outputTypes().get(0), is(DataType.LONG));
Map.Entry<String, Object> entry = updateNode.updateDoc().entrySet().iterator().next();
assertThat(entry.getKey(), is("name"));
assertThat((String)entry.getValue(), is("Vogon lyric fan"));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testESUpdatePlanWithMultiplePrimaryKeyValues() throws Exception {
Plan plan = plan("update users set name='Vogon lyric fan' where id in (1,2,3)");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESUpdateNode.class));
ESUpdateNode updateNode = (ESUpdateNode)planNode;
assertThat(updateNode.primaryKeyValues().length, is(3));
assertThat(updateNode.primaryKeyValues(), arrayContainingInAnyOrder("1", "2", "3"));
}
@Test
public void testCopyFromPlan() throws Exception {
Plan plan = plan("copy users from '/path/to/file.extension'");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CopyNode.class));
CopyNode copyNode = (CopyNode)planNode;
assertThat(copyNode.index(), is("users"));
assertThat(copyNode.path(), is("/path/to/file.extension"));
}
}
| sql/src/test/java/io/crate/planner/PlannerTest.java | package io.crate.planner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.crate.analyze.Analysis;
import io.crate.analyze.Analyzer;
import io.crate.analyze.WhereClause;
import io.crate.metadata.MetaDataModule;
import io.crate.metadata.Routing;
import io.crate.metadata.TableIdent;
import io.crate.metadata.doc.DocSchemaInfo;
import io.crate.metadata.sys.MetaDataSysModule;
import io.crate.metadata.sys.SysClusterTableInfo;
import io.crate.metadata.sys.SysNodesTableInfo;
import io.crate.metadata.sys.SysShardsTableInfo;
import io.crate.metadata.table.SchemaInfo;
import io.crate.metadata.table.TableInfo;
import io.crate.metadata.table.TestingTableInfo;
import io.crate.operator.aggregation.impl.AggregationImplModule;
import io.crate.operator.operator.OperatorModule;
import io.crate.operator.scalar.ScalarFunctionModule;
import io.crate.planner.node.*;
import io.crate.planner.projection.AggregationProjection;
import io.crate.planner.projection.GroupProjection;
import io.crate.planner.projection.Projection;
import io.crate.planner.projection.TopNProjection;
import io.crate.planner.symbol.*;
import io.crate.sql.parser.SqlParser;
import io.crate.sql.tree.Statement;
import org.cratedb.DataType;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.junit.Before;
import org.junit.Test;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import static junit.framework.Assert.assertTrue;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class PlannerTest {
static {
ClassLoader.getSystemClassLoader().setDefaultAssertionStatus(true);
}
private Injector injector;
private Analyzer analyzer;
private Planner planner = new Planner();
Routing shardRouting = new Routing(ImmutableMap.<String, Map<String, Set<Integer>>>builder()
.put("nodeOne", ImmutableMap.<String, Set<Integer>>of("t1", ImmutableSet.of(1, 2)))
.put("nodeTow", ImmutableMap.<String, Set<Integer>>of("t1", ImmutableSet.of(3, 4)))
.build());
Routing nodesRouting = new Routing(ImmutableMap.<String, Map<String, Set<Integer>>>builder()
.put("nodeOne", ImmutableMap.<String, Set<Integer>>of())
.put("nodeTwo", ImmutableMap.<String, Set<Integer>>of())
.build());
class TestClusterTableInfo extends SysClusterTableInfo {
        // granularity < DOC is already handled differently
// here we want a table with handlerSideRouting and DOC granularity.
@Override
public RowGranularity rowGranularity() {
return RowGranularity.DOC;
}
}
class TestShardsTableInfo extends SysShardsTableInfo {
public TestShardsTableInfo() {
super(null);
}
@Override
public Routing getRouting(WhereClause whereClause) {
return shardRouting;
}
}
class TestNodesTableInfo extends SysNodesTableInfo {
public TestNodesTableInfo() {
super(null);
}
@Override
public Routing getRouting(WhereClause whereClause) {
return nodesRouting;
}
}
class TestSysModule extends MetaDataSysModule {
@Override
protected void bindTableInfos() {
tableInfoBinder.addBinding(TestNodesTableInfo.IDENT.name()).toInstance(
new TestNodesTableInfo());
tableInfoBinder.addBinding(TestShardsTableInfo.IDENT.name()).toInstance(
new TestShardsTableInfo());
tableInfoBinder.addBinding(TestClusterTableInfo.IDENT.name()).toInstance(
new TestClusterTableInfo());
}
}
class TestModule extends MetaDataModule {
@Override
protected void configure() {
ClusterService clusterService = mock(ClusterService.class);
bind(ClusterService.class).toInstance(clusterService);
super.configure();
}
@Override
protected void bindSchemas() {
super.bindSchemas();
SchemaInfo schemaInfo = mock(SchemaInfo.class);
TableIdent userTableIdent = new TableIdent(null, "users");
TableInfo userTableInfo = TestingTableInfo.builder(userTableIdent, RowGranularity.DOC, shardRouting)
.add("name", DataType.STRING, null)
.add("id", DataType.LONG, null)
.addPrimaryKey("id")
.build();
TableIdent charactersTableIdent = new TableIdent(null, "characters");
TableInfo charactersTableInfo = TestingTableInfo.builder(charactersTableIdent, RowGranularity.DOC, shardRouting)
.add("name", DataType.STRING, null)
.add("id", DataType.STRING, null)
.addPrimaryKey("id")
.build();
when(schemaInfo.getTableInfo(charactersTableIdent.name())).thenReturn(charactersTableInfo);
when(schemaInfo.getTableInfo(userTableIdent.name())).thenReturn(userTableInfo);
schemaBinder.addBinding(DocSchemaInfo.NAME).toInstance(schemaInfo);
}
}
@Before
public void setUp() throws Exception {
injector = new ModulesBuilder()
.add(new TestModule())
.add(new TestSysModule())
.add(new AggregationImplModule())
.add(new ScalarFunctionModule())
.add(new OperatorModule())
.createInjector();
analyzer = injector.getInstance(Analyzer.class);
}
private Plan plan(String statement) {
return planner.plan(analyzer.analyze(SqlParser.createStatement(statement)));
}
@Test
public void testGroupByWithAggregationStringLiteralArguments() {
Plan plan = plan("select count('foo'), name from users group by name");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode) iterator.next();
// TODO: optimize to not collect literal
//assertThat(collectNode.toCollect().size(), is(1));
GroupProjection groupProjection = (GroupProjection) collectNode.projections().get(0);
Aggregation aggregation = groupProjection.values().get(0);
//assertTrue(aggregation.inputs().get(0).symbolType().isLiteral());
}
@Test
public void testGroupByWithAggregationPlan() throws Exception {
Plan plan = plan("select count(*), name from users group by name");
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
// distributed collect
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.downStreamNodes().size(), is(2));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.DOC));
assertThat(collectNode.executionNodes().size(), is(2));
assertThat(collectNode.toCollect().size(), is(1));
assertThat(collectNode.projections().size(), is(1));
assertThat(collectNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(collectNode.outputTypes().size(), is(2));
assertThat(collectNode.outputTypes().get(0), is(DataType.STRING));
assertThat(collectNode.outputTypes().get(1), is(DataType.NULL));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.numUpstreams(), is(2));
assertThat(mergeNode.executionNodes().size(), is(2));
assertEquals(mergeNode.inputTypes(), collectNode.outputTypes());
assertThat(mergeNode.projections().size(), is(1));
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
GroupProjection groupProjection = (GroupProjection) mergeNode.projections().get(0);
InputColumn inputColumn = (InputColumn) groupProjection.values().get(0).inputs().get(0);
assertThat(inputColumn.index(), is(1));
assertThat(mergeNode.outputTypes().size(), is(2));
assertThat(mergeNode.outputTypes().get(0), is(DataType.STRING));
assertThat(mergeNode.outputTypes().get(1), is(DataType.LONG));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode localMerge = (MergeNode) planNode;
assertThat(localMerge.numUpstreams(), is(2));
assertTrue(localMerge.executionNodes().isEmpty());
assertEquals(mergeNode.outputTypes(), localMerge.inputTypes());
assertThat(localMerge.projections().get(0), instanceOf(TopNProjection.class));
TopNProjection topN = (TopNProjection) localMerge.projections().get(0);
assertThat(topN.outputs().size(), is(2));
// groupProjection changes output to keys, aggregations
// topN needs to swap the outputs back
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(1));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(0));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGetPlan() throws Exception {
Plan plan = plan("select name from users where id = 1");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.ids().get(0), is("1"));
assertFalse(iterator.hasNext());
assertThat(node.outputs().size(), is(1));
}
@Test
public void testGetPlanStringLiteral() throws Exception {
Plan plan = plan("select name from characters where id = 'one'");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("characters"));
assertThat(node.ids().get(0), is("one"));
assertFalse(iterator.hasNext());
assertThat(node.outputs().size(), is(1));
}
@Test
public void testMultiGetPlan() throws Exception {
Plan plan = plan("select name from users where id in (1, 2)");
Iterator<PlanNode> iterator = plan.iterator();
ESGetNode node = (ESGetNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.ids().size(), is(2));
assertThat(node.ids().get(0), is("1"));
assertThat(node.ids().get(1), is("2"));
}
@Test
public void testDeletePlan() throws Exception {
Plan plan = plan("delete from users where id = 1");
Iterator<PlanNode> iterator = plan.iterator();
ESDeleteNode node = (ESDeleteNode) iterator.next();
assertThat(node.index(), is("users"));
assertThat(node.id(), is("1"));
assertFalse(iterator.hasNext());
}
@Test
public void testMultiDeletePlan() throws Exception {
Plan plan = plan("delete from users where id in (1, 2)");
Iterator<PlanNode> iterator = plan.iterator();
assertThat(iterator.next(), instanceOf(ESDeleteByQueryNode.class));
}
@Test
public void testGroupByWithAggregationAndLimit() throws Exception {
Plan plan = plan("select count(*), name from users group by name limit 1 offset 1");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
planNode = iterator.next();
// distributed merge
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.projections().get(0), instanceOf(GroupProjection.class));
assertThat(mergeNode.projections().get(1), instanceOf(TopNProjection.class));
// limit must include offset because the real limit can only be applied on the handler
// after all rows have been gathered.
TopNProjection topN = (TopNProjection) mergeNode.projections().get(1);
assertThat(topN.limit(), is(2));
assertThat(topN.offset(), is(0));
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(1));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(0));
// local merge
planNode = iterator.next();
assertThat(planNode.projections().get(0), instanceOf(TopNProjection.class));
topN = (TopNProjection) planNode.projections().get(0);
assertThat(topN.limit(), is(1));
assertThat(topN.offset(), is(1));
assertThat(topN.outputs().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(0)).index(), is(0));
assertThat(topN.outputs().get(1), instanceOf(InputColumn.class));
assertThat(((InputColumn) topN.outputs().get(1)).index(), is(1));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGlobalAggregationPlan() throws Exception {
String statementString = "select count(name) from users";
Statement statement = SqlParser.createStatement(statementString);
Analysis analysis = analyzer.analyze(statement);
Plan plan = planner.plan(analysis);
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.outputTypes().get(0), is(DataType.NULL));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.DOC));
assertThat(collectNode.projections().size(), is(1));
assertThat(collectNode.projections().get(0), instanceOf(AggregationProjection.class));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.inputTypes().get(0), is(DataType.NULL));
assertThat(mergeNode.outputTypes().get(0), is(DataType.LONG));
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testGroupByOnNodeLevel() throws Exception {
Plan plan = plan("select count(*), name from sys.nodes group by name");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode) iterator.next();
assertFalse(collectNode.hasDownstreams());
assertThat(collectNode.outputTypes().get(0), is(DataType.STRING));
assertThat(collectNode.outputTypes().get(1), is(DataType.LONG));
MergeNode mergeNode = (MergeNode) iterator.next();
assertThat(mergeNode.numUpstreams(), is(2));
assertThat(mergeNode.projections().size(), is(1));
TopNProjection projection = (TopNProjection) mergeNode.projections().get(0);
assertThat(((InputColumn) projection.outputs().get(0)).index(), is(1));
assertThat(((InputColumn) projection.outputs().get(1)).index(), is(0));
assertFalse(iterator.hasNext());
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testShardPlan() throws Exception {
Plan plan = plan("select id from sys.shards order by id limit 10");
// TODO: add where clause
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CollectNode.class));
CollectNode collectNode = (CollectNode) planNode;
assertThat(collectNode.outputTypes().get(0), is(DataType.INTEGER));
assertThat(collectNode.maxRowGranularity(), is(RowGranularity.SHARD));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
MergeNode mergeNode = (MergeNode) planNode;
assertThat(mergeNode.inputTypes().size(), is(1));
assertThat(mergeNode.inputTypes().get(0), is(DataType.INTEGER));
assertThat(mergeNode.outputTypes().size(), is(1));
assertThat(mergeNode.outputTypes().get(0), is(DataType.INTEGER));
assertThat(mergeNode.numUpstreams(), is(2));
PlanPrinter pp = new PlanPrinter();
System.out.println(pp.print(plan));
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testESSearchPlan() throws Exception {
Plan plan = plan("select name from users where name = 'x' order by id limit 10");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESSearchNode.class));
ESSearchNode searchNode = (ESSearchNode) planNode;
assertThat(searchNode.outputTypes().size(), is(1));
assertThat(searchNode.outputTypes().get(0), is(DataType.STRING));
assertTrue(searchNode.whereClause().hasQuery());
assertFalse(plan.expectsAffectedRows());
}
@Test
public void testESIndexPlan() throws Exception {
Plan plan = plan("insert into users (id, name) values (42, 'Deep Thought')");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESIndexNode.class));
ESIndexNode indexNode = (ESIndexNode) planNode;
assertThat(indexNode.columns().size(), is(2));
assertThat(indexNode.columns().get(0).valueType(), is(DataType.LONG));
assertThat(indexNode.columns().get(0).info().ident().columnIdent().name(), is("id"));
assertThat(indexNode.columns().get(1).valueType(), is(DataType.STRING));
assertThat(indexNode.columns().get(1).info().ident().columnIdent().name(), is("name"));
assertThat(indexNode.valuesLists().size(), is(1));
assertThat(((LongLiteral) indexNode.valuesLists().get(0).get(0)).value(), is(42l));
assertThat(((StringLiteral) indexNode.valuesLists().get(0).get(1)).value().utf8ToString(), is("Deep Thought"));
assertThat(indexNode.outputTypes().size(), is(1));
assertThat(indexNode.outputTypes().get(0), is(DataType.LONG));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testESIndexPlanMultipleValues() throws Exception {
Plan plan = plan("insert into users (id, name) values (42, 'Deep Thought'), (99, 'Marvin')");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESIndexNode.class));
ESIndexNode indexNode = (ESIndexNode) planNode;
assertThat(indexNode.valuesLists().size(), is(2));
assertThat(((LongLiteral) indexNode.valuesLists().get(0).get(0)).value(), is(42l));
assertThat(((StringLiteral) indexNode.valuesLists().get(0).get(1)).value().utf8ToString(), is("Deep Thought"));
assertThat(((LongLiteral) indexNode.valuesLists().get(1).get(0)).value(), is(99l));
assertThat(((StringLiteral) indexNode.valuesLists().get(1).get(1)).value().utf8ToString(), is("Marvin"));
assertThat(indexNode.outputTypes().size(), is(1));
assertThat(indexNode.outputTypes().get(0), is(DataType.LONG));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testCountDistinctPlan() throws Exception {
Plan plan = plan("select count(distinct name) from users");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode)iterator.next();
Projection projection = collectNode.projections().get(0);
assertThat(projection, instanceOf(AggregationProjection.class));
AggregationProjection aggregationProjection = (AggregationProjection)projection;
assertThat(aggregationProjection.aggregations().size(), is(1));
Aggregation aggregation = aggregationProjection.aggregations().get(0);
assertThat(aggregation.toStep(), is(Aggregation.Step.PARTIAL));
Symbol aggregationInput = aggregation.inputs().get(0);
assertThat(aggregationInput.symbolType(), is(SymbolType.INPUT_COLUMN));
assertThat(collectNode.toCollect().get(0), instanceOf(Reference.class));
assertThat(((Reference)collectNode.toCollect().get(0)).info().ident().columnIdent().name(), is("name"));
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(2));
Projection projection1 = mergeNode.projections().get(1);
assertThat(projection1, instanceOf(TopNProjection.class));
Symbol collection_count = projection1.outputs().get(0);
assertThat(collection_count, instanceOf(Function.class));
}
@Test
public void testGroupByWithOrderOnAggregate() throws Exception {
Plan plan = plan("select count(*), name from users group by name order by count(*)");
Iterator<PlanNode> iterator = plan.iterator();
CollectNode collectNode = (CollectNode)iterator.next();
// reducer
iterator.next();
// sort is on handler because there is no limit/offset
// handler
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(1));
TopNProjection topNProjection = (TopNProjection)mergeNode.projections().get(0);
Symbol orderBy = topNProjection.orderBy().get(0);
assertThat(orderBy, instanceOf(InputColumn.class));
// points to the first values() entry of the previous GroupProjection
assertThat(((InputColumn) orderBy).index(), is(1));
}
@Test
public void testHandlerSideRouting() throws Exception {
Plan plan = plan("select * from sys.cluster");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
// just testing the dispatching here.. making sure it is not a ESSearchNode
assertThat(planNode, instanceOf(CollectNode.class));
}
@Test
public void testHandlerSideRoutingGroupBy() throws Exception {
Plan plan = plan("select count(*) from sys.cluster group by name");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
// just testing the dispatching here.. making sure it is not a ESSearchNode
assertThat(planNode, instanceOf(CollectNode.class));
planNode = iterator.next();
assertThat(planNode, instanceOf(MergeNode.class));
// no distributed merge, only 1 mergeNode
assertFalse(iterator.hasNext());
}
@Test
public void testCountDistinctWithGroupBy() throws Exception {
Plan plan = plan("select count(distinct id), name from users group by name order by count(distinct id)");
Iterator<PlanNode> iterator = plan.iterator();
// collect
CollectNode collectNode = (CollectNode)iterator.next();
assertThat(collectNode.toCollect().get(0), instanceOf(Reference.class));
assertThat(collectNode.toCollect().size(), is(2));
assertThat(((Reference)collectNode.toCollect().get(1)).info().ident().columnIdent().name(), is("id"));
assertThat(((Reference)collectNode.toCollect().get(0)).info().ident().columnIdent().name(), is("name"));
Projection projection = collectNode.projections().get(0);
assertThat(projection, instanceOf(GroupProjection.class));
GroupProjection groupProjection = (GroupProjection)projection;
Symbol groupKey = groupProjection.keys().get(0);
assertThat(groupKey, instanceOf(InputColumn.class));
assertThat(((InputColumn)groupKey).index(), is(0));
assertThat(groupProjection.values().size(), is(1));
Aggregation aggregation = groupProjection.values().get(0);
assertThat(aggregation.toStep(), is(Aggregation.Step.PARTIAL));
Symbol aggregationInput = aggregation.inputs().get(0);
assertThat(aggregationInput.symbolType(), is(SymbolType.INPUT_COLUMN));
// reducer
MergeNode mergeNode = (MergeNode)iterator.next();
assertThat(mergeNode.projections().size(), is(2));
Projection groupProjection1 = mergeNode.projections().get(0);
assertThat(groupProjection1, instanceOf(GroupProjection.class));
groupProjection = (GroupProjection)groupProjection1;
assertThat(groupProjection.keys().get(0), instanceOf(InputColumn.class));
assertThat(((InputColumn)groupProjection.keys().get(0)).index(), is(0));
assertThat(groupProjection.values().get(0), instanceOf(Aggregation.class));
Aggregation aggregationStep2 = groupProjection.values().get(0);
assertThat(aggregationStep2.toStep(), is(Aggregation.Step.FINAL));
TopNProjection topNProjection = (TopNProjection)mergeNode.projections().get(1);
Symbol collection_count = topNProjection.outputs().get(0);
assertThat(collection_count, instanceOf(Function.class));
// handler
MergeNode localMergeNode = (MergeNode)iterator.next();
assertThat(localMergeNode.projections().size(), is(1));
Projection localTopN = localMergeNode.projections().get(0);
assertThat(localTopN, instanceOf(TopNProjection.class));
}
@Test
public void testESUpdatePlan() throws Exception {
Plan plan = plan("update users set name='Vogon lyric fan' where id=1");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESUpdateNode.class));
ESUpdateNode updateNode = (ESUpdateNode)planNode;
assertThat(updateNode.index(), is("users"));
assertThat(updateNode.primaryKeyValues().length, is(1));
assertThat(updateNode.primaryKeyValues()[0], is("1"));
assertThat(updateNode.outputTypes().size(), is(1));
assertThat(updateNode.outputTypes().get(0), is(DataType.LONG));
Map.Entry<String, Object> entry = updateNode.updateDoc().entrySet().iterator().next();
assertThat(entry.getKey(), is("name"));
assertThat((String)entry.getValue(), is("Vogon lyric fan"));
assertTrue(plan.expectsAffectedRows());
}
@Test
public void testESUpdatePlanWithMultiplePrimaryKeyValues() throws Exception {
Plan plan = plan("update users set name='Vogon lyric fan' where id in (1,2,3)");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(ESUpdateNode.class));
ESUpdateNode updateNode = (ESUpdateNode)planNode;
assertThat(updateNode.primaryKeyValues().length, is(3));
assertThat(updateNode.primaryKeyValues(), arrayContainingInAnyOrder("1", "2", "3"));
}
@Test
public void testCopyFromPlan() throws Exception {
Plan plan = plan("copy users from '/path/to/file.extension'");
Iterator<PlanNode> iterator = plan.iterator();
PlanNode planNode = iterator.next();
assertThat(planNode, instanceOf(CopyNode.class));
CopyNode copyNode = (CopyNode)planNode;
assertThat(copyNode.index(), is("users"));
assertThat(copyNode.path(), is("/path/to/file.extension"));
}
}
| fix PlannerTest.testGroupByOnNodeLevel()
| sql/src/test/java/io/crate/planner/PlannerTest.java | fix PlannerTest.testGroupByOnNodeLevel() | <ide><path>ql/src/test/java/io/crate/planner/PlannerTest.java
<ide> CollectNode collectNode = (CollectNode) iterator.next();
<ide> assertFalse(collectNode.hasDownstreams());
<ide> assertThat(collectNode.outputTypes().get(0), is(DataType.STRING));
<del> assertThat(collectNode.outputTypes().get(1), is(DataType.LONG));
<add> assertThat(collectNode.outputTypes().get(1), is(DataType.NULL));
<ide>
<ide> MergeNode mergeNode = (MergeNode) iterator.next();
<ide> assertThat(mergeNode.numUpstreams(), is(2));
<del> assertThat(mergeNode.projections().size(), is(1));
<del> TopNProjection projection = (TopNProjection) mergeNode.projections().get(0);
<add> assertThat(mergeNode.projections().size(), is(2));
<add>
<add> assertThat(mergeNode.outputTypes().get(0), is(DataType.LONG));
<add> assertThat(mergeNode.outputTypes().get(1), is(DataType.STRING));
<add>
<add> GroupProjection groupProjection = (GroupProjection) mergeNode.projections().get(0);
<add> assertThat(groupProjection.keys().size(), is(1));
<add> assertThat(((InputColumn) groupProjection.outputs().get(0)).index(), is(0));
<add> assertThat(groupProjection.outputs().get(1), is(instanceOf(Aggregation.class)));
<add> assertThat(((Aggregation)groupProjection.outputs().get(1)).functionIdent().name(), is("count"));
<add> assertThat(((Aggregation)groupProjection.outputs().get(1)).fromStep(), is(Aggregation.Step.PARTIAL));
<add> assertThat(((Aggregation)groupProjection.outputs().get(1)).toStep(), is(Aggregation.Step.FINAL));
<add>
<add> TopNProjection projection = (TopNProjection) mergeNode.projections().get(1);
<ide> assertThat(((InputColumn) projection.outputs().get(0)).index(), is(1));
<ide> assertThat(((InputColumn) projection.outputs().get(1)).index(), is(0));
<ide> |
|
Java | mit | a24056eb877840377839df31150172fe63ee4296 | 0 | SquidDev-CC/CC-Tweaks,SquidDev-CC/CCTweaks | package squiddev.cctweaks.core.asm;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.MethodNode;
import squiddev.cctweaks.core.reference.Config;
import squiddev.cctweaks.core.utils.DebugLogger;
import static org.objectweb.asm.Opcodes.*;
/**
* LuaJ related patches
*/
public class PatchLuaJ {
protected static final String DEBUG_INFO = "org/luaj/vm2/lib/DebugLib$DebugInfo";
protected static final String IGETSOURCE = "org/luaj/vm2/luajc/IGetSource";
protected static final String IGETSOURCE_TYPE = "L" + IGETSOURCE + ";";
/**
* Patch the Debug Library
* TODO: Make this work better than it does. Use ChickenLib or something
*
* @param bytes The bytes of the {@link org.luaj.vm2.lib.DebugLib.DebugInfo} class
* @return Reformatted bytes
*/
public static byte[] patchDebugLib(byte[] bytes) {
if (!Config.config.luaJC) return bytes;
// This is semi-auto generate code from the CCStudio patch
ClassNode classNode = new ClassNode();
ClassReader classReader = new ClassReader(bytes);
classReader.accept(classNode, 0);
classNode.visitField(0, "getSource", IGETSOURCE_TYPE, null, null).visitEnd();
for (MethodNode method : classNode.methods) {
if (method.name.equals("sourceline") && method.desc.equals("()Ljava/lang/String;")) {
method.instructions.clear();
method.localVariables = null;
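                /* Emits bytecode equivalent to:
                   if (closure == null) {
                       if (getSource != null) {
                           return getSource.getSource() + ":" + getSource.getLine();
                       }
                       return func.tojstring();
                   }
                   String s = closure.p.source.tojstring();
                   int line = currentline();
                   return (s.startsWith("@") || s.startsWith("=") ? s.substring(1) : s) + ":" + line;
                */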
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
Label l0 = new Label();
method.visitJumpInsn(IFNONNULL, l0);
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
Label l1 = new Label();
method.visitJumpInsn(IFNULL, l1);
method.visitTypeInsn(NEW, "java/lang/StringBuilder");
method.visitInsn(DUP);
method.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
method.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getSource", "()Ljava/lang/String;", true);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
method.visitLdcInsn(":");
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
method.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getLine", "()I", true);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
method.visitInsn(ARETURN);
method.visitLabel(l1);
method.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "func", "Lorg/luaj/vm2/LuaValue;");
method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaValue", "tojstring", "()Ljava/lang/String;", false);
method.visitInsn(ARETURN);
method.visitLabel(l0);
method.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
method.visitVarInsn(ALOAD, 0);
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/LuaClosure", "p", "Lorg/luaj/vm2/Prototype;");
method.visitFieldInsn(GETFIELD, "org/luaj/vm2/Prototype", "source", "Lorg/luaj/vm2/LuaString;");
method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaString", "tojstring", "()Ljava/lang/String;", false);
method.visitVarInsn(ASTORE, 1);
method.visitVarInsn(ALOAD, 0);
method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/lib/DebugLib$DebugInfo", "currentline", "()I", false);
method.visitVarInsn(ISTORE, 2);
method.visitTypeInsn(NEW, "java/lang/StringBuilder");
method.visitInsn(DUP);
method.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
method.visitVarInsn(ALOAD, 1);
method.visitLdcInsn("@");
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
Label l2 = new Label();
method.visitJumpInsn(IFNE, l2);
method.visitVarInsn(ALOAD, 1);
method.visitLdcInsn("=");
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
Label l3 = new Label();
method.visitJumpInsn(IFEQ, l3);
method.visitLabel(l2);
method.visitFrame(Opcodes.F_FULL, 3, new Object[]{"org/luaj/vm2/lib/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 1, new Object[]{"java/lang/StringBuilder"});
method.visitVarInsn(ALOAD, 1);
method.visitInsn(ICONST_1);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "substring", "(I)Ljava/lang/String;", false);
Label l4 = new Label();
method.visitJumpInsn(GOTO, l4);
method.visitLabel(l3);
method.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/StringBuilder"});
method.visitVarInsn(ALOAD, 1);
method.visitLabel(l4);
method.visitFrame(Opcodes.F_FULL, 3, new Object[]{"org/luaj/vm2/lib/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 2, new Object[]{"java/lang/StringBuilder", "java/lang/String"});
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
method.visitLdcInsn(":");
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
method.visitVarInsn(ILOAD, 2);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
method.visitInsn(ARETURN);
} else if (method.name.equals("setfunction") && method.desc.equals("(Lorg/luaj/vm2/LuaValue;)V")) {
method.instructions.clear();
method.localVariables = null;
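                /* Emits bytecode equivalent to:
                   this.func = func;
                   this.closure = (func instanceof LuaClosure ? (LuaClosure) func : null);
                   this.getSource = (func instanceof IGetSource ? (IGetSource) func : null);
                */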
method.visitVarInsn(ALOAD, 0);
method.visitVarInsn(ALOAD, 1);
method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "func", "Lorg/luaj/vm2/LuaValue;");
method.visitVarInsn(ALOAD, 0);
method.visitVarInsn(ALOAD, 1);
method.visitTypeInsn(INSTANCEOF, "org/luaj/vm2/LuaClosure");
Label l0 = new Label();
method.visitJumpInsn(IFEQ, l0);
method.visitVarInsn(ALOAD, 1);
method.visitTypeInsn(CHECKCAST, "org/luaj/vm2/LuaClosure");
Label l1 = new Label();
method.visitJumpInsn(GOTO, l1);
method.visitLabel(l0);
method.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
method.visitInsn(ACONST_NULL);
method.visitLabel(l1);
method.visitFrame(F_FULL, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaClosure"});
method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "closure", "Lorg/luaj/vm2/LuaClosure;");
method.visitVarInsn(ALOAD, 0);
method.visitVarInsn(ALOAD, 1);
method.visitTypeInsn(INSTANCEOF, IGETSOURCE);
Label l2 = new Label();
method.visitJumpInsn(IFEQ, l2);
method.visitVarInsn(ALOAD, 1);
method.visitTypeInsn(CHECKCAST, IGETSOURCE);
Label l3 = new Label();
method.visitJumpInsn(GOTO, l3);
method.visitLabel(l2);
method.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
method.visitInsn(ACONST_NULL);
method.visitLabel(l3);
method.visitFrame(F_FULL, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[]{DEBUG_INFO, IGETSOURCE});
method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "getSource", IGETSOURCE_TYPE);
method.visitInsn(RETURN);
}
}
DebugLogger.debug("Inject extra methods into DebugLib$DebugInfo");
// Something breaks
ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS | ClassWriter.COMPUTE_FRAMES);
classNode.accept(writer);
return writer.toByteArray();
}
}
| src/main/java/squiddev/cctweaks/core/asm/PatchLuaJ.java | package squiddev.cctweaks.core.asm;
import org.objectweb.asm.*;
import org.objectweb.asm.util.TraceClassVisitor;
import squiddev.cctweaks.core.reference.Config;
import squiddev.cctweaks.core.utils.DebugLogger;
import java.io.PrintWriter;
import java.io.StringWriter;
import static org.objectweb.asm.Opcodes.*;
/**
* LuaJ related patches
*/
public class PatchLuaJ {
protected static final String DEBUG_INFO = "org/luaj/vm2/lib/DebugLib$DebugInfo";
protected static final String IGETSOURCE = "org/luaj/vm2/luajc/IGetSource";
protected static final String IGETSOURCE_TYPE = "L" + IGETSOURCE + ";";
/**
* Patch the Debug Library
*
* @param bytes The bytes of the {@link org.luaj.vm2.lib.DebugLib.DebugInfo} class
* @return Reformatted bytes
*/
public static byte[] patchDebugLib(byte[] bytes) {
if(!Config.config.luaJC) return bytes;
// This is semi-auto generate code from the CCStudio patch
ClassWriter writer = new ClassWriter(0);
new ClassReader(bytes).accept(new DebugStateClassAdapter(writer), 0);
writer.visitField(0, "getSource", IGETSOURCE_TYPE, null, null).visitEnd();
DebugLogger.debug("Inject extra methods into DebugLib$DebugInfo");
try {
byte[] result = writer.toByteArray();
AsmUtils.validateClass(result);
StringWriter sWriter = new StringWriter();
PrintWriter printWriter = new PrintWriter(sWriter);
new ClassReader(result).accept(new TraceClassVisitor(printWriter), 0);
DebugLogger.debug("Validation result: " + sWriter.toString());
DebugLogger.debug("Validated");
return result;
} catch(Exception e) {
e.printStackTrace();
}
return bytes;
}
public static class DebugStateClassAdapter extends ClassVisitor {
public DebugStateClassAdapter(ClassVisitor cv) {
super(ASM5, cv);
}
@Override
public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
MethodVisitor visitor = cv.visitMethod(access, name, desc, signature, exceptions);
if(visitor != null) {
if(name.equals("sourceline") && desc.equals("()Ljava/lang/String;")) {
return new DebugState_SourceLine_MethodAdapter(visitor);
} else if(name.equals("setfunction") && desc.equals("(Lorg/luaj/vm2/LuaValue;)V")) {
return new DebugState_SetFunction_MethodAdapter(visitor);
}
}
return visitor;
}
public static class DebugState_SetFunction_MethodAdapter extends MethodVisitor {
protected MethodVisitor _method;
public DebugState_SetFunction_MethodAdapter(MethodVisitor mv) {
super(ASM5);
_method = mv;
}
@Override
public void visitEnd() {
/*
void setfunction(LuaValue func) {
this.func = func;
this.closure = (func instanceof LuaClosure ? (LuaClosure) func : null);
this.getSource = (func instanceof IGetSource ? (IGetSource) func : null);
}
*/
MethodVisitor mv = _method;
mv.visitCode();
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(ALOAD, 1);
mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "func", "Lorg/luaj/vm2/LuaValue;");
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(INSTANCEOF, "org/luaj/vm2/LuaClosure");
Label l0 = new Label();
mv.visitJumpInsn(IFEQ, l0);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, "org/luaj/vm2/LuaClosure");
Label l1 = new Label();
mv.visitJumpInsn(GOTO, l1);
mv.visitLabel(l0);
mv.visitFrame(F_SAME1, 0, null, 1, new Object[] {DEBUG_INFO});
mv.visitInsn(ACONST_NULL);
mv.visitLabel(l1);
mv.visitFrame(F_FULL, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaClosure"});
mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "closure", "Lorg/luaj/vm2/LuaClosure;");
mv.visitVarInsn(ALOAD, 0);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(INSTANCEOF, IGETSOURCE);
Label l2 = new Label();
mv.visitJumpInsn(IFEQ, l2);
mv.visitVarInsn(ALOAD, 1);
mv.visitTypeInsn(CHECKCAST, IGETSOURCE);
Label l3 = new Label();
mv.visitJumpInsn(GOTO, l3);
mv.visitLabel(l2);
mv.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
mv.visitInsn(ACONST_NULL);
mv.visitLabel(l3);
mv.visitFrame(F_FULL, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[] {DEBUG_INFO, IGETSOURCE});
mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "getSource", IGETSOURCE_TYPE);
mv.visitInsn(RETURN);
mv.visitMaxs(2, 2);
mv.visitEnd();
}
}
public static class DebugState_SourceLine_MethodAdapter extends MethodVisitor {
protected MethodVisitor _method;
public DebugState_SourceLine_MethodAdapter(MethodVisitor mv) {
super(ASM5);
_method = mv;
}
@Override
public void visitEnd() {
/*
if (closure == null) {
if (getSource != null) {
return getSource.getSource() + ":" + getSource.getLine();
}
return func.tojstring();
}
String s = closure.p.source.tojstring();
int line = currentline();
return (s.startsWith("@") || s.startsWith("=") ? s.substring(1) : s) + ":" + line;
*/
MethodVisitor mv = _method;
mv.visitCode();
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
Label l0 = new Label();
mv.visitJumpInsn(IFNONNULL, l0);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
Label l1 = new Label();
mv.visitJumpInsn(IFNULL, l1);
mv.visitTypeInsn(NEW, "java/lang/StringBuilder");
mv.visitInsn(DUP);
mv.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
mv.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getSource", "()Ljava/lang/String;", true);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
mv.visitLdcInsn(":");
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
mv.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getLine", "()I", true);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
mv.visitInsn(ARETURN);
mv.visitLabel(l1);
mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "func", "Lorg/luaj/vm2/LuaValue;");
mv.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaValue", "tojstring", "()Ljava/lang/String;", false);
mv.visitInsn(ARETURN);
mv.visitLabel(l0);
mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
mv.visitVarInsn(ALOAD, 0);
mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
mv.visitFieldInsn(GETFIELD, "org/luaj/vm2/LuaClosure", "p", "Lorg/luaj/vm2/Prototype;");
mv.visitFieldInsn(GETFIELD, "org/luaj/vm2/Prototype", "source", "Lorg/luaj/vm2/LuaString;");
mv.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaString", "tojstring", "()Ljava/lang/String;", false);
mv.visitVarInsn(ASTORE, 1);
mv.visitVarInsn(ALOAD, 0);
mv.visitMethodInsn(INVOKEVIRTUAL, "squiddev/cctweaks/DebugLib$DebugInfo", "currentline", "()I", false);
mv.visitVarInsn(ISTORE, 2);
mv.visitTypeInsn(NEW, "java/lang/StringBuilder");
mv.visitInsn(DUP);
mv.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
mv.visitVarInsn(ALOAD, 1);
mv.visitLdcInsn("@");
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
Label l2 = new Label();
mv.visitJumpInsn(IFNE, l2);
mv.visitVarInsn(ALOAD, 1);
mv.visitLdcInsn("=");
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
Label l3 = new Label();
mv.visitJumpInsn(IFEQ, l3);
mv.visitLabel(l2);
mv.visitFrame(Opcodes.F_FULL, 3, new Object[] {"squiddev/cctweaks/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 1, new Object[] {"java/lang/StringBuilder"});
mv.visitVarInsn(ALOAD, 1);
mv.visitInsn(ICONST_1);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "substring", "(I)Ljava/lang/String;", false);
Label l4 = new Label();
mv.visitJumpInsn(GOTO, l4);
mv.visitLabel(l3);
mv.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {"java/lang/StringBuilder"});
mv.visitVarInsn(ALOAD, 1);
mv.visitLabel(l4);
mv.visitFrame(Opcodes.F_FULL, 3, new Object[] {"squiddev/cctweaks/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 2, new Object[] {"java/lang/StringBuilder", "java/lang/String"});
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
mv.visitLdcInsn(":");
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
mv.visitVarInsn(ILOAD, 2);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
mv.visitInsn(ARETURN);
mv.visitMaxs(3, 3);
mv.visitEnd();
}
}
}
}
| Fix DebugLib patcher
| src/main/java/squiddev/cctweaks/core/asm/PatchLuaJ.java | Fix DebugLib patcher | <ide><path>rc/main/java/squiddev/cctweaks/core/asm/PatchLuaJ.java
<ide> package squiddev.cctweaks.core.asm;
<ide>
<del>import org.objectweb.asm.*;
<del>import org.objectweb.asm.util.TraceClassVisitor;
<add>import org.objectweb.asm.ClassReader;
<add>import org.objectweb.asm.ClassWriter;
<add>import org.objectweb.asm.Label;
<add>import org.objectweb.asm.Opcodes;
<add>import org.objectweb.asm.tree.ClassNode;
<add>import org.objectweb.asm.tree.MethodNode;
<ide> import squiddev.cctweaks.core.reference.Config;
<ide> import squiddev.cctweaks.core.utils.DebugLogger;
<del>
<del>import java.io.PrintWriter;
<del>import java.io.StringWriter;
<ide>
<ide> import static org.objectweb.asm.Opcodes.*;
<ide>
<ide> protected static final String DEBUG_INFO = "org/luaj/vm2/lib/DebugLib$DebugInfo";
<ide> protected static final String IGETSOURCE = "org/luaj/vm2/luajc/IGetSource";
<ide> protected static final String IGETSOURCE_TYPE = "L" + IGETSOURCE + ";";
<add>
<ide> /**
<ide> * Patch the Debug Library
<add> * TODO: Make this work better than it does. Use ChickenLib or something
<ide> *
<ide> * @param bytes The bytes of the {@link org.luaj.vm2.lib.DebugLib.DebugInfo} class
<ide> * @return Reformatted bytes
<ide> */
<ide> public static byte[] patchDebugLib(byte[] bytes) {
<del> if(!Config.config.luaJC) return bytes;
<add> if (!Config.config.luaJC) return bytes;
<ide>
<ide> // This is semi-auto generate code from the CCStudio patch
<del> ClassWriter writer = new ClassWriter(0);
<del> new ClassReader(bytes).accept(new DebugStateClassAdapter(writer), 0);
<add> ClassNode classNode = new ClassNode();
<add> ClassReader classReader = new ClassReader(bytes);
<add> classReader.accept(classNode, 0);
<ide>
<del> writer.visitField(0, "getSource", IGETSOURCE_TYPE, null, null).visitEnd();
<add> classNode.visitField(0, "getSource", IGETSOURCE_TYPE, null, null).visitEnd();
<ide>
<del> DebugLogger.debug("Inject extra methods into DebugLib$DebugInfo");
<del> try {
<del> byte[] result = writer.toByteArray();
<del> AsmUtils.validateClass(result);
<add> for (MethodNode method : classNode.methods) {
<add> if (method.name.equals("sourceline") && method.desc.equals("()Ljava/lang/String;")) {
<add> method.instructions.clear();
<add> method.localVariables = null;
<ide>
<del> StringWriter sWriter = new StringWriter();
<del> PrintWriter printWriter = new PrintWriter(sWriter);
<del> new ClassReader(result).accept(new TraceClassVisitor(printWriter), 0);
<del> DebugLogger.debug("Validation result: " + sWriter.toString());
<del> DebugLogger.debug("Validated");
<del> return result;
<del> } catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del> return bytes;
<del> }
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
<add> Label l0 = new Label();
<add> method.visitJumpInsn(IFNONNULL, l0);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<add> Label l1 = new Label();
<add> method.visitJumpInsn(IFNULL, l1);
<add> method.visitTypeInsn(NEW, "java/lang/StringBuilder");
<add> method.visitInsn(DUP);
<add> method.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<add> method.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getSource", "()Ljava/lang/String;", true);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<add> method.visitLdcInsn(":");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<add> method.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getLine", "()I", true);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
<add> method.visitInsn(ARETURN);
<add> method.visitLabel(l1);
<add> method.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "func", "Lorg/luaj/vm2/LuaValue;");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaValue", "tojstring", "()Ljava/lang/String;", false);
<add> method.visitInsn(ARETURN);
<add> method.visitLabel(l0);
<add> method.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/lib/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/LuaClosure", "p", "Lorg/luaj/vm2/Prototype;");
<add> method.visitFieldInsn(GETFIELD, "org/luaj/vm2/Prototype", "source", "Lorg/luaj/vm2/LuaString;");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaString", "tojstring", "()Ljava/lang/String;", false);
<add> method.visitVarInsn(ASTORE, 1);
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/lib/DebugLib$DebugInfo", "currentline", "()I", false);
<add> method.visitVarInsn(ISTORE, 2);
<add> method.visitTypeInsn(NEW, "java/lang/StringBuilder");
<add> method.visitInsn(DUP);
<add> method.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitLdcInsn("@");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
<add> Label l2 = new Label();
<add> method.visitJumpInsn(IFNE, l2);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitLdcInsn("=");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
<add> Label l3 = new Label();
<add> method.visitJumpInsn(IFEQ, l3);
<add> method.visitLabel(l2);
<add> method.visitFrame(Opcodes.F_FULL, 3, new Object[]{"org/luaj/vm2/lib/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 1, new Object[]{"java/lang/StringBuilder"});
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitInsn(ICONST_1);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "substring", "(I)Ljava/lang/String;", false);
<add> Label l4 = new Label();
<add> method.visitJumpInsn(GOTO, l4);
<add> method.visitLabel(l3);
<add> method.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/StringBuilder"});
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitLabel(l4);
<add> method.visitFrame(Opcodes.F_FULL, 3, new Object[]{"org/luaj/vm2/lib/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 2, new Object[]{"java/lang/StringBuilder", "java/lang/String"});
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<add> method.visitLdcInsn(":");
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<add> method.visitVarInsn(ILOAD, 2);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
<add> method.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
<add> method.visitInsn(ARETURN);
<add> } else if (method.name.equals("setfunction") && method.desc.equals("(Lorg/luaj/vm2/LuaValue;)V")) {
<add> method.instructions.clear();
<add> method.localVariables = null;
<ide>
<del> public static class DebugStateClassAdapter extends ClassVisitor {
<del> public DebugStateClassAdapter(ClassVisitor cv) {
<del> super(ASM5, cv);
<del> }
<del>
<del> @Override
<del> public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
<del> MethodVisitor visitor = cv.visitMethod(access, name, desc, signature, exceptions);
<del> if(visitor != null) {
<del> if(name.equals("sourceline") && desc.equals("()Ljava/lang/String;")) {
<del> return new DebugState_SourceLine_MethodAdapter(visitor);
<del> } else if(name.equals("setfunction") && desc.equals("(Lorg/luaj/vm2/LuaValue;)V")) {
<del> return new DebugState_SetFunction_MethodAdapter(visitor);
<del> }
<del> }
<del> return visitor;
<del> }
<del>
<del> public static class DebugState_SetFunction_MethodAdapter extends MethodVisitor {
<del> protected MethodVisitor _method;
<del>
<del> public DebugState_SetFunction_MethodAdapter(MethodVisitor mv) {
<del> super(ASM5);
<del> _method = mv;
<del> }
<del>
<del> @Override
<del> public void visitEnd() {
<del> /*
<del> void setfunction(LuaValue func) {
<del> this.func = func;
<del> this.closure = (func instanceof LuaClosure ? (LuaClosure) func : null);
<del> this.getSource = (func instanceof IGetSource ? (IGetSource) func : null);
<del> }
<del> */
<del> MethodVisitor mv = _method;
<del>
<del> mv.visitCode();
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "func", "Lorg/luaj/vm2/LuaValue;");
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitTypeInsn(INSTANCEOF, "org/luaj/vm2/LuaClosure");
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "func", "Lorg/luaj/vm2/LuaValue;");
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitTypeInsn(INSTANCEOF, "org/luaj/vm2/LuaClosure");
<ide> Label l0 = new Label();
<del> mv.visitJumpInsn(IFEQ, l0);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitTypeInsn(CHECKCAST, "org/luaj/vm2/LuaClosure");
<add> method.visitJumpInsn(IFEQ, l0);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitTypeInsn(CHECKCAST, "org/luaj/vm2/LuaClosure");
<ide> Label l1 = new Label();
<del> mv.visitJumpInsn(GOTO, l1);
<del> mv.visitLabel(l0);
<del> mv.visitFrame(F_SAME1, 0, null, 1, new Object[] {DEBUG_INFO});
<del> mv.visitInsn(ACONST_NULL);
<del> mv.visitLabel(l1);
<del> mv.visitFrame(F_FULL, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaClosure"});
<del> mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "closure", "Lorg/luaj/vm2/LuaClosure;");
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitTypeInsn(INSTANCEOF, IGETSOURCE);
<add> method.visitJumpInsn(GOTO, l1);
<add> method.visitLabel(l0);
<add> method.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
<add> method.visitInsn(ACONST_NULL);
<add> method.visitLabel(l1);
<add> method.visitFrame(F_FULL, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaClosure"});
<add> method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "closure", "Lorg/luaj/vm2/LuaClosure;");
<add> method.visitVarInsn(ALOAD, 0);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitTypeInsn(INSTANCEOF, IGETSOURCE);
<ide> Label l2 = new Label();
<del> mv.visitJumpInsn(IFEQ, l2);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitTypeInsn(CHECKCAST, IGETSOURCE);
<add> method.visitJumpInsn(IFEQ, l2);
<add> method.visitVarInsn(ALOAD, 1);
<add> method.visitTypeInsn(CHECKCAST, IGETSOURCE);
<ide> Label l3 = new Label();
<del> mv.visitJumpInsn(GOTO, l3);
<del> mv.visitLabel(l2);
<del> mv.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
<del> mv.visitInsn(ACONST_NULL);
<del> mv.visitLabel(l3);
<del> mv.visitFrame(F_FULL, 2, new Object[] {DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[] {DEBUG_INFO, IGETSOURCE});
<del> mv.visitFieldInsn(PUTFIELD, DEBUG_INFO, "getSource", IGETSOURCE_TYPE);
<del> mv.visitInsn(RETURN);
<del> mv.visitMaxs(2, 2);
<del> mv.visitEnd();
<add> method.visitJumpInsn(GOTO, l3);
<add> method.visitLabel(l2);
<add> method.visitFrame(F_SAME1, 0, null, 1, new Object[]{DEBUG_INFO});
<add> method.visitInsn(ACONST_NULL);
<add> method.visitLabel(l3);
<add> method.visitFrame(F_FULL, 2, new Object[]{DEBUG_INFO, "org/luaj/vm2/LuaValue"}, 2, new Object[]{DEBUG_INFO, IGETSOURCE});
<add> method.visitFieldInsn(PUTFIELD, DEBUG_INFO, "getSource", IGETSOURCE_TYPE);
<add> method.visitInsn(RETURN);
<ide> }
<ide> }
<ide>
<del> public static class DebugState_SourceLine_MethodAdapter extends MethodVisitor {
<del> protected MethodVisitor _method;
<add> DebugLogger.debug("Inject extra methods into DebugLib$DebugInfo");
<ide>
<del> public DebugState_SourceLine_MethodAdapter(MethodVisitor mv) {
<del> super(ASM5);
<del> _method = mv;
<del> }
<del>
<del> @Override
<del> public void visitEnd() {
<del> /*
<del> if (closure == null) {
<del> if (getSource != null) {
<del> return getSource.getSource() + ":" + getSource.getLine();
<del> }
<del> return func.tojstring();
<del> }
<del> String s = closure.p.source.tojstring();
<del> int line = currentline();
<del> return (s.startsWith("@") || s.startsWith("=") ? s.substring(1) : s) + ":" + line;
<del> */
<del> MethodVisitor mv = _method;
<del>
<del> mv.visitCode();
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
<del> Label l0 = new Label();
<del> mv.visitJumpInsn(IFNONNULL, l0);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<del> Label l1 = new Label();
<del> mv.visitJumpInsn(IFNULL, l1);
<del> mv.visitTypeInsn(NEW, "java/lang/StringBuilder");
<del> mv.visitInsn(DUP);
<del> mv.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<del> mv.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getSource", "()Ljava/lang/String;", true);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<del> mv.visitLdcInsn(":");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "getSource", "Lorg/luaj/vm2/luajc/IGetSource;");
<del> mv.visitMethodInsn(INVOKEINTERFACE, "org/luaj/vm2/luajc/IGetSource", "getLine", "()I", true);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
<del> mv.visitInsn(ARETURN);
<del> mv.visitLabel(l1);
<del> mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "func", "Lorg/luaj/vm2/LuaValue;");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaValue", "tojstring", "()Ljava/lang/String;", false);
<del> mv.visitInsn(ARETURN);
<del> mv.visitLabel(l0);
<del> mv.visitFrame(Opcodes.F_SAME, 0, null, 0, null);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitFieldInsn(GETFIELD, "squiddev/cctweaks/DebugLib$DebugInfo", "closure", "Lorg/luaj/vm2/LuaClosure;");
<del> mv.visitFieldInsn(GETFIELD, "org/luaj/vm2/LuaClosure", "p", "Lorg/luaj/vm2/Prototype;");
<del> mv.visitFieldInsn(GETFIELD, "org/luaj/vm2/Prototype", "source", "Lorg/luaj/vm2/LuaString;");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "org/luaj/vm2/LuaString", "tojstring", "()Ljava/lang/String;", false);
<del> mv.visitVarInsn(ASTORE, 1);
<del> mv.visitVarInsn(ALOAD, 0);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "squiddev/cctweaks/DebugLib$DebugInfo", "currentline", "()I", false);
<del> mv.visitVarInsn(ISTORE, 2);
<del> mv.visitTypeInsn(NEW, "java/lang/StringBuilder");
<del> mv.visitInsn(DUP);
<del> mv.visitMethodInsn(INVOKESPECIAL, "java/lang/StringBuilder", "<init>", "()V", false);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitLdcInsn("@");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
<del> Label l2 = new Label();
<del> mv.visitJumpInsn(IFNE, l2);
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitLdcInsn("=");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "startsWith", "(Ljava/lang/String;)Z", false);
<del> Label l3 = new Label();
<del> mv.visitJumpInsn(IFEQ, l3);
<del> mv.visitLabel(l2);
<del> mv.visitFrame(Opcodes.F_FULL, 3, new Object[] {"squiddev/cctweaks/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 1, new Object[] {"java/lang/StringBuilder"});
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitInsn(ICONST_1);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/String", "substring", "(I)Ljava/lang/String;", false);
<del> Label l4 = new Label();
<del> mv.visitJumpInsn(GOTO, l4);
<del> mv.visitLabel(l3);
<del> mv.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {"java/lang/StringBuilder"});
<del> mv.visitVarInsn(ALOAD, 1);
<del> mv.visitLabel(l4);
<del> mv.visitFrame(Opcodes.F_FULL, 3, new Object[] {"squiddev/cctweaks/DebugLib$DebugInfo", "java/lang/String", Opcodes.INTEGER}, 2, new Object[] {"java/lang/StringBuilder", "java/lang/String"});
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<del> mv.visitLdcInsn(":");
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;", false);
<del> mv.visitVarInsn(ILOAD, 2);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "append", "(I)Ljava/lang/StringBuilder;", false);
<del> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/StringBuilder", "toString", "()Ljava/lang/String;", false);
<del> mv.visitInsn(ARETURN);
<del> mv.visitMaxs(3, 3);
<del> mv.visitEnd();
<del> }
<del> }
<add> // Something breaks
<add> ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS | ClassWriter.COMPUTE_FRAMES);
<add> classNode.accept(writer);
<add> return writer.toByteArray();
<ide> }
<ide> } |
|
JavaScript | mit | baac9d1ebef8b1145338740775794af77ef7b870 | 0 | league-of-legends-devs/feeder.lol-item-sets-generator.org,league-of-legends-devs/feeder.lol-item-sets-generator.org | import queue from '../kue';
import runGenerator from '../generator';
import * as statsd from '../statsd';
import config from '../config';
import { connectMongo, disconnectMongo } from '../db';
const cronTask = () => {
if (config.env === 'production') {
console.log('Production mode.');
} else {
console.log('Dev mode.');
}
queue.watchStuckJobs(10000);
queue.on('error', (err) => {
console.error(err);
throw err;
});
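  // Worker: open the MongoDB connection, run the generator while recording statsd timing metrics, then close the connection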
queue.process('generator', async (job, done) => {
statsd.startGenerationTimer();
console.log('Init MongoDB connection ...');
try {
await connectMongo();
} catch (e) {
done(e);
console.error(e);
return;
}
console.log('Init MongoDB connection : done !');
try {
await runGenerator();
} catch (e) {
console.error(e);
done(e);
return;
} finally {
console.log('Shutting down MongoDB connection ...');
await disconnectMongo();
console.log('Shutting down MongoDB connection : done !');
statsd.stopGenerationTimer();
statsd.registerGeneration();
}
done();
});
const job = queue
.create('generator')
.ttl(1000 * 60 * 40) // 40 minutes timeout
.removeOnComplete(true)
.save((err) => {
if (err) {
console.error(`Job creation failed for ID ${job.id}`);
throw err;
} else {
console.log('Job created.');
}
});
job.on('complete', (/* result */) => {
console.log(`Job completed. Removing job ${job.id} ...`);
job.remove((err) => {
if (err) {
throw err;
}
console.log(`Removing job ${job.id} : done !`);
});
}).on('failed attempt', (/* errorMessage, doneAttempts */) => {
throw new Error('Job attempt failed.');
}).on('failed', (/* errorMessage */) => {
throw new Error('Job jailed.');
});
};
export default cronTask;
| src/cronTasks/generator.js | import queue from '../kue';
import runGenerator from '../generator';
import * as statsd from '../statsd';
import config from '../config';
import { connectMongo, disconnectMongo } from '../db';
const cronTask = () => {
if (config.env === 'production') {
console.log('Production mode.');
} else {
console.log('Dev mode.');
}
queue.watchStuckJobs(10000);
queue.on('error', (err) => {
console.error(err);
throw err;
});
queue.process('generator', async (job, done) => {
statsd.startGenerationTimer();
console.log('Init MongoDB connection ...');
try {
await connectMongo();
} catch (e) {
done(e);
console.error(e);
return;
}
console.log('Init MongoDB connection : done !');
try {
await runGenerator();
} catch (e) {
console.error(e);
done(e);
return;
} finally {
console.log('Shutting down MongoDB connection ...');
await disconnectMongo();
console.log('Shutting down MongoDB connection : done !');
statsd.stopGenerationTimer();
statsd.registerGeneration();
}
done();
});
const job = queue
.create('generator')
.ttl(1000 * 60 * 30) // 30 minutes timeout
.removeOnComplete(true)
.save((err) => {
if (err) {
console.error(`Job creation failed for ID ${job.id}`);
throw err;
} else {
console.log('Job created.');
}
});
job.on('complete', (/* result */) => {
console.log(`Job completed. Removing job ${job.id} ...`);
job.remove((err) => {
if (err) {
throw err;
}
console.log(`Removing job ${job.id} : done !`);
});
}).on('failed attempt', (/* errorMessage, doneAttempts */) => {
throw new Error('Job attempt failed.');
}).on('failed', (/* errorMessage */) => {
throw new Error('Job jailed.');
});
};
export default cronTask;
| feat: 30 => 40 sec timeout
| src/cronTasks/generator.js | feat: 30 => 40 sec timeout | <ide><path>rc/cronTasks/generator.js
<ide>
<ide> const job = queue
<ide> .create('generator')
<del> .ttl(1000 * 60 * 30) // 30 minutes timeout
<add> .ttl(1000 * 60 * 40) // 40 minutes timeout
<ide> .removeOnComplete(true)
<ide> .save((err) => {
<ide> if (err) { |
|
Java | bsd-3-clause | 2e21b782d22d872bed4ff75bc11f84980ddc3dbf | 0 | mucar89/SteelSeries-Swing,mcder017/SteelSeries-Swing,HanSolo/SteelSeries-Swing,gcauchis/SteelSeries-Swing | /*
* Copyright (c) 2012, Gerrit Grunwald
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package eu.hansolo.steelseries.extras;
import eu.hansolo.steelseries.gauges.AbstractGauge;
import eu.hansolo.steelseries.gauges.AbstractRadial;
import eu.hansolo.steelseries.tools.ColorDef;
import eu.hansolo.steelseries.tools.PostPosition;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.geom.Arc2D;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Path2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import org.pushingpixels.trident.Timeline;
import org.pushingpixels.trident.ease.Spline;
/**
*
* @author hansolo
*/
public final class Compass extends AbstractRadial {
// <editor-fold defaultstate="collapsed" desc="Variable declaration">
private static final double MIN_VALUE = 0;
private static final double MAX_VALUE = 360;
private double value = 0;
private double angleStep = (2 * Math.PI) / (MAX_VALUE - MIN_VALUE);
private final Point2D CENTER = new Point2D.Double();
// Images used to combine layers for background and foreground
private BufferedImage bImage;
private BufferedImage fImage;
private BufferedImage compassRoseImage;
private BufferedImage pointerShadowImage;
private BufferedImage pointerImage;
private BufferedImage disabledImage;
private Timeline timeline = new Timeline(this);
private final Spline EASE = new Spline(0.5f);
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Constructor">
public Compass() {
super();
setPointerColor(ColorDef.RED);
init(getInnerBounds().width, getInnerBounds().height);
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Initialization">
@Override
public AbstractGauge init(final int WIDTH, final int HEIGHT) {
final int GAUGE_WIDTH = isFrameVisible() ? WIDTH : getGaugeBounds().width;
final int GAUGE_HEIGHT = isFrameVisible() ? HEIGHT : getGaugeBounds().height;
if (GAUGE_WIDTH <= 1 || GAUGE_HEIGHT <= 1) {
return this;
}
if (!isFrameVisible()) {
setFramelessOffset(-getGaugeBounds().width * 0.0841121495, -getGaugeBounds().width * 0.0841121495);
} else {
setFramelessOffset(getGaugeBounds().x, getGaugeBounds().y);
}
// Create Background Image
if (bImage != null) {
bImage.flush();
}
bImage = UTIL.createImage(GAUGE_WIDTH, GAUGE_WIDTH, java.awt.Transparency.TRANSLUCENT);
// Create Foreground Image
if (fImage != null) {
fImage.flush();
}
fImage = UTIL.createImage(GAUGE_WIDTH, GAUGE_WIDTH, java.awt.Transparency.TRANSLUCENT);
if (isFrameVisible()) {
switch (getFrameType()) {
/*case ROUND:
FRAME_FACTORY.createRadialFrame(GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;*/
case SQUARE:
FRAME_FACTORY.createLinearFrame(GAUGE_WIDTH, GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;
default:
FRAME_FACTORY.createRadialFrame(GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;
}
}
if (isBackgroundVisible()) {
create_BACKGROUND_Image(GAUGE_WIDTH, "", "", bImage);
}
if (compassRoseImage != null) {
compassRoseImage.flush();
}
compassRoseImage = create_COMPASS_ROSE_Image(GAUGE_WIDTH);
if (pointerShadowImage != null) {
pointerShadowImage.flush();
}
pointerShadowImage = create_POINTER_SHADOW_Image(GAUGE_WIDTH);
if (pointerImage != null) {
pointerImage.flush();
}
pointerImage = create_POINTER_Image(GAUGE_WIDTH);
createPostsImage(GAUGE_WIDTH, fImage, PostPosition.CENTER);
if (isForegroundVisible()) {
switch (getFrameType()) {
case SQUARE:
FOREGROUND_FACTORY.createLinearForeground(GAUGE_WIDTH, GAUGE_WIDTH, false, bImage);
break;
case ROUND:
default:
FOREGROUND_FACTORY.createRadialForeground(GAUGE_WIDTH, false, getForegroundType(), fImage);
break;
}
}
if (disabledImage != null) {
disabledImage.flush();
}
disabledImage = create_DISABLED_Image(GAUGE_WIDTH);
return this;
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Visualization">
@Override
protected void paintComponent(java.awt.Graphics g) {
if (!isInitialized()) {
return;
}
final Graphics2D G2 = (Graphics2D) g.create();
CENTER.setLocation(getGaugeBounds().getCenterX(), getGaugeBounds().getCenterX());
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
// Translate the coordinate system related to the insets
G2.translate(getFramelessOffset().getX(), getFramelessOffset().getY());
final AffineTransform OLD_TRANSFORM = G2.getTransform();
// Draw combined background image
G2.drawImage(bImage, 0, 0, null);
// Draw compass rose
G2.drawImage(compassRoseImage, 0, 0, null);
// Draw the pointer
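        // The shadow is rotated around a pivot shifted down by 2px so it ends up slightly offset from the pointer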
G2.rotate((value - MIN_VALUE) * angleStep, CENTER.getX(), CENTER.getY() + 2);
G2.drawImage(pointerShadowImage, 0, 0, null);
G2.setTransform(OLD_TRANSFORM);
G2.rotate((value - MIN_VALUE) * angleStep, CENTER.getX(), CENTER.getY());
G2.drawImage(pointerImage, 0, 0, null);
G2.setTransform(OLD_TRANSFORM);
// Draw combined foreground image
G2.drawImage(fImage, 0, 0, null);
if (!isEnabled()) {
G2.drawImage(disabledImage, 0, 0, null);
}
// Translate the coordinate system back to original
G2.translate(-getInnerBounds().x, -getInnerBounds().y);
G2.dispose();
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Getters and Setters">
@Override
public double getValue() {
return value;
}
/**
* Sets the direction of the needle in degrees (0 - 360°)
* @param VALUE
*/
@Override
public void setValue(final double VALUE) {
if (isEnabled()) {
double oldValue = value;
value = VALUE % 360;
fireStateChanged();
firePropertyChange(VALUE_PROPERTY, oldValue, value);
repaint();
}
}
@Override
    public void setValueAnimated(double newValue) {
        if (isEnabled()) {
            // Needle should always take the shortest way to its new position
            if (Math.abs(value - newValue) > 180d) {
                if (value > newValue) {
                    newValue += 360d;
                } else {
                    value += 360d;
                }
            }

            if (timeline.getState() == Timeline.TimelineState.PLAYING_FORWARD || timeline.getState() == Timeline.TimelineState.PLAYING_REVERSE) {
                timeline.abort();
            }

            timeline = new Timeline(this);
            timeline.addPropertyToInterpolate("value", value, newValue);
            timeline.setEase(EASE);
            timeline.setDuration(this.getStdTimeToValue());
            timeline.play();

            value = newValue;
        }
    }
@Override
public double getMinValue() {
return MIN_VALUE;
}
@Override
public double getMaxValue() {
return MAX_VALUE;
}
@Override
public Point2D getCenter() {
return new Point2D.Double(bImage.getWidth() / 2.0 + getInnerBounds().x, bImage.getHeight() / 2.0 + getInnerBounds().y);
}
@Override
public Rectangle2D getBounds2D() {
return new Rectangle2D.Double(bImage.getMinX(), bImage.getMinY(), bImage.getWidth(), bImage.getHeight());
}
@Override
public Rectangle getLcdBounds() {
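        // The compass has no LCD display, so an empty rectangle is returned.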
return new Rectangle();
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Image related">
private BufferedImage create_BIG_ROSE_POINTER_Image(final int WIDTH) {
final BufferedImage IMAGE = UTIL.createImage((int) (WIDTH * 0.0546875f), (int) (WIDTH * 0.2f), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
G2.setStroke(new BasicStroke(0.75f));
// Define arrow shape of pointer
final GeneralPath POINTER_WHITE_LEFT = new GeneralPath();
final GeneralPath POINTER_WHITE_RIGHT = new GeneralPath();
POINTER_WHITE_LEFT.moveTo(IMAGE_WIDTH - IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, 0);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.closePath();
POINTER_WHITE_RIGHT.moveTo(IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, 0);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.closePath();
final Area POINTER_FRAME_WHITE = new Area(POINTER_WHITE_LEFT);
POINTER_FRAME_WHITE.add(new Area(POINTER_WHITE_RIGHT));
final Color STROKE_COLOR = getBackgroundColor().SYMBOL_COLOR.darker();
final Color FILL_COLOR = getBackgroundColor().SYMBOL_COLOR;
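        // Two-tone fill: the right wing uses the darker shade and the left wing the plain symbol color, which gives the rose point a subtle 3-D look.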
G2.setColor(STROKE_COLOR);
G2.fill(POINTER_WHITE_RIGHT);
G2.setColor(FILL_COLOR);
G2.fill(POINTER_WHITE_LEFT);
G2.setColor(STROKE_COLOR);
G2.draw(POINTER_FRAME_WHITE);
G2.dispose();
return IMAGE;
}
private BufferedImage create_SMALL_ROSE_POINTER_Image(final int WIDTH) {
final BufferedImage IMAGE = UTIL.createImage((int) (WIDTH * 0.0546875f), (int) (WIDTH * 0.2f), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
G2.setStroke(new BasicStroke(0.75f));
// Define arrow shape of pointer
final GeneralPath POINTER_WHITE_LEFT = new GeneralPath();
final GeneralPath POINTER_WHITE_RIGHT = new GeneralPath();
POINTER_WHITE_LEFT.moveTo(IMAGE_WIDTH - IMAGE_WIDTH * 0.75f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT / 2.0f);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.closePath();
POINTER_WHITE_RIGHT.moveTo(IMAGE_WIDTH * 0.75f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT / 2.0f);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.closePath();
final Area POINTER_FRAME_WHITE = new Area(POINTER_WHITE_LEFT);
POINTER_FRAME_WHITE.add(new Area(POINTER_WHITE_RIGHT));
final Color STROKE_COLOR = getBackgroundColor().SYMBOL_COLOR.darker();
final Color FILL_COLOR = getBackgroundColor().SYMBOL_COLOR;
G2.setColor(FILL_COLOR);
G2.fill(POINTER_FRAME_WHITE);
G2.setColor(STROKE_COLOR);
G2.draw(POINTER_FRAME_WHITE);
G2.dispose();
return IMAGE;
}
private BufferedImage create_COMPASS_ROSE_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, WIDTH, java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
//G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
//G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
//G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
//G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
//final int IMAGE_HEIGHT = IMAGE.getHeight();
// ******************* COMPASS ROSE *************************************************
final Point2D COMPASS_CENTER = new Point2D.Double(IMAGE_WIDTH / 2.0f, IMAGE_WIDTH / 2.0f);
AffineTransform transform = G2.getTransform();
G2.setStroke(new BasicStroke(IMAGE_WIDTH * 0.01953125f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
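        // Outer segmented ring: one 15 degree arc segment every 30 degrees around the rose.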
for (int i = 0; i <= 360; i += 30) {
G2.draw(new Arc2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.263671875f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.263671875f, IMAGE_WIDTH * 0.52734375f, IMAGE_WIDTH * 0.52734375f, i, 15, Arc2D.OPEN));
}
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(0.5f));
java.awt.Shape outerCircle = new Ellipse2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.2734375f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.2734375f, IMAGE_WIDTH * 0.546875f, IMAGE_WIDTH * 0.546875f);
G2.draw(outerCircle);
java.awt.Shape innerCircle = new Ellipse2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.25390625f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.25390625f, IMAGE_WIDTH * 0.5078125f, IMAGE_WIDTH * 0.5078125f);
G2.draw(innerCircle);
final java.awt.geom.Line2D LINE = new java.awt.geom.Line2D.Double(COMPASS_CENTER.getX(), IMAGE_WIDTH * 0.4018691589, COMPASS_CENTER.getX(), IMAGE_WIDTH * 0.1495327103);
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(1f));
G2.draw(LINE);
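        // Radial spokes: after the first vertical spoke, the rotation pattern 15°, 30°, 30°, 15° repeats once per quadrant.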
        final double[] SPOKE_STEPS = {Math.PI / 12, Math.PI / 6, Math.PI / 6, Math.PI / 12};
        for (int quadrant = 0; quadrant < 4; quadrant++) {
            for (double step : SPOKE_STEPS) {
                G2.rotate(step, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
                G2.draw(LINE);
            }
        }
G2.setTransform(transform);
final BufferedImage BIG_ROSE_POINTER = create_BIG_ROSE_POINTER_Image(IMAGE_WIDTH);
final BufferedImage SMALL_ROSE_POINTER = create_SMALL_ROSE_POINTER_Image(IMAGE_WIDTH);
final Point2D OFFSET = new Point2D.Double(IMAGE_WIDTH * 0.475f, IMAGE_WIDTH * 0.20f);
G2.translate(OFFSET.getX(), OFFSET.getY());
// N
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// NE
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// E
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// SE
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// S
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// SW
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// W
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// NW
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
G2.setTransform(transform);
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(IMAGE_WIDTH * 0.00953125f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
G2.draw(new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.1025f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.1025f), IMAGE_WIDTH * 0.205f, IMAGE_WIDTH * 0.205f));
G2.setStroke(new BasicStroke(0.5f));
G2.setColor(getBackgroundColor().SYMBOL_COLOR.darker());
final java.awt.Shape OUTER_ROSE_ELLIPSE = new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.11f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.11f), IMAGE_WIDTH * 0.22f, IMAGE_WIDTH * 0.22f);
G2.draw(OUTER_ROSE_ELLIPSE);
final java.awt.Shape INNER_ROSE_ELLIPSE = new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.095f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.095f), IMAGE_WIDTH * 0.19f, IMAGE_WIDTH * 0.19f);
G2.draw(INNER_ROSE_ELLIPSE);
// ******************* TICKMARKS ****************************************************
create_TICKMARKS(G2, IMAGE_WIDTH);
G2.dispose();
return IMAGE;
}
private void create_TICKMARKS(final Graphics2D G2, final int IMAGE_WIDTH) {
// Store former transformation
final AffineTransform FORMER_TRANSFORM = G2.getTransform();
final BasicStroke MEDIUM_STROKE = new BasicStroke(0.005859375f * IMAGE_WIDTH, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
final BasicStroke THIN_STROKE = new BasicStroke(0.00390625f * IMAGE_WIDTH, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
final java.awt.Font BIG_FONT = new java.awt.Font("Serif", java.awt.Font.PLAIN, (int) (0.12f * IMAGE_WIDTH));
final java.awt.Font SMALL_FONT = new java.awt.Font("Serif", java.awt.Font.PLAIN, (int) (0.06f * IMAGE_WIDTH));
final float TEXT_DISTANCE = 0.0750f * IMAGE_WIDTH;
final float MIN_LENGTH = 0.015625f * IMAGE_WIDTH;
final float MED_LENGTH = 0.0234375f * IMAGE_WIDTH;
final float MAX_LENGTH = 0.03125f * IMAGE_WIDTH;
final Color TEXT_COLOR = getBackgroundColor().LABEL_COLOR;
final Color TICK_COLOR = getBackgroundColor().LABEL_COLOR;
        // Create the tickmarks of the compass dial
final float RADIUS = IMAGE_WIDTH * 0.38f;
final Point2D COMPASS_CENTER = new Point2D.Double(IMAGE_WIDTH / 2.0f, IMAGE_WIDTH / 2.0f);
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
// Draw ticks
Point2D innerPoint;
Point2D outerPoint;
Point2D textPoint = null;
java.awt.geom.Line2D tick;
int tickCounter90 = 0;
int tickCounter15 = 0;
int tickCounter5 = 0;
int counter = 0;
double sinValue = 0;
double cosValue = 0;
final double STEP = (2.0d * Math.PI) / (360.0d);
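        // Tick positions are computed as center + RADIUS * (sin(alpha), cos(alpha)), so alpha near 0 or 2*PI lies at the
        // bottom of the dial and alpha == PI at the top; that is why "S" is drawn at counter == 360 and "N" at counter == 180.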
for (double alpha = 2 * Math.PI; alpha >= 0; alpha -= STEP) {
G2.setStroke(THIN_STROKE);
sinValue = Math.sin(alpha);
cosValue = Math.cos(alpha);
G2.setColor(TICK_COLOR);
if (tickCounter5 == 5) {
G2.setStroke(THIN_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MIN_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MIN_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter5 = 0;
}
// Different tickmark every 15 units
if (tickCounter15 == 15) {
G2.setStroke(THIN_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MED_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MED_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter15 = 0;
tickCounter90 += 15;
}
// Different tickmark every 90 units plus text
if (tickCounter90 == 90) {
G2.setStroke(MEDIUM_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MAX_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MAX_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter90 = 0;
}
// Draw text
G2.setFont(BIG_FONT);
G2.setColor(TEXT_COLOR);
textPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - TEXT_DISTANCE) * sinValue, COMPASS_CENTER.getY() + (RADIUS - TEXT_DISTANCE) * cosValue);
switch (counter) {
case 360:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "S", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 45:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "SW", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 90:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "W", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 135:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "NW", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 180:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "N", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 225:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "NE", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 270:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "E", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 315:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "SE", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
}
G2.setTransform(FORMER_TRANSFORM);
tickCounter5++;
tickCounter15++;
counter++;
}
// Restore former transformation
G2.setTransform(FORMER_TRANSFORM);
}
@Override
protected BufferedImage create_POINTER_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, WIDTH, java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
switch (getPointerType()) {
case TYPE2:
final GeneralPath NORTHPOINTER2 = new GeneralPath();
NORTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER2.moveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.closePath();
final Point2D NORTHPOINTER2_START = new Point2D.Double(NORTHPOINTER2.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER2_STOP = new Point2D.Double(NORTHPOINTER2.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER2_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER2_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
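                // The 0.4999 / 0.5 fraction pair creates a hard edge in the middle of the horizontal gradient, so the left half of the needle is LIGHT and the right half MEDIUM.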
final java.awt.LinearGradientPaint NORTHPOINTER2_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER2_START, NORTHPOINTER2_STOP, NORTHPOINTER2_FRACTIONS, NORTHPOINTER2_COLORS);
G2.setPaint(NORTHPOINTER2_GRADIENT);
G2.fill(NORTHPOINTER2);
G2.setColor(getPointerColor().DARK);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(NORTHPOINTER2);
final GeneralPath SOUTHPOINTER2 = new GeneralPath();
SOUTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER2.moveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.closePath();
final Point2D SOUTHPOINTER2_START = new Point2D.Double(SOUTHPOINTER2.getBounds2D().getMinX(), 0);
final Point2D SOUTHPOINTER2_STOP = new Point2D.Double(SOUTHPOINTER2.getBounds2D().getMaxX(), 0);
final float[] SOUTHPOINTER2_FRACTIONS = {
0.0f,
0.48f,
0.48009998f,
1.0f
};
final Color[] SOUTHPOINTER2_COLORS = {
new Color(227, 229, 232, 255),
new Color(227, 229, 232, 255),
new Color(171, 177, 184, 255),
new Color(171, 177, 184, 255)
};
final java.awt.LinearGradientPaint SOUTHPOINTER2_GRADIENT = new java.awt.LinearGradientPaint(SOUTHPOINTER2_START, SOUTHPOINTER2_STOP, SOUTHPOINTER2_FRACTIONS, SOUTHPOINTER2_COLORS);
G2.setPaint(SOUTHPOINTER2_GRADIENT);
G2.fill(SOUTHPOINTER2);
G2.setColor(new Color(0xABB1B8));
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(SOUTHPOINTER2);
break;
case TYPE3:
final GeneralPath NORTHPOINTER3 = new GeneralPath();
NORTHPOINTER3.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER3.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5560747663551402);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.closePath();
final Point2D NORTHPOINTER3_START = new Point2D.Double(NORTHPOINTER3.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER3_STOP = new Point2D.Double(NORTHPOINTER3.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER3_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER3_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
final java.awt.LinearGradientPaint NORTHPOINTER3_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER3_START, NORTHPOINTER3_STOP, NORTHPOINTER3_FRACTIONS, NORTHPOINTER3_COLORS);
G2.setPaint(NORTHPOINTER3_GRADIENT);
G2.fill(NORTHPOINTER3);
break;
case TYPE1:
default:
final GeneralPath NORTHPOINTER1 = new GeneralPath();
NORTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.closePath();
final Point2D NORTHPOINTER1_START = new Point2D.Double(NORTHPOINTER1.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER1_STOP = new Point2D.Double(NORTHPOINTER1.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER1_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER1_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
final java.awt.LinearGradientPaint NORTHPOINTER1_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER1_START, NORTHPOINTER1_STOP, NORTHPOINTER1_FRACTIONS, NORTHPOINTER1_COLORS);
G2.setPaint(NORTHPOINTER1_GRADIENT);
G2.fill(NORTHPOINTER1);
G2.setColor(getPointerColor().DARK);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(NORTHPOINTER1);
final GeneralPath SOUTHPOINTER1 = new GeneralPath();
SOUTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.closePath();
final Point2D SOUTHPOINTER1_START = new Point2D.Double(SOUTHPOINTER1.getBounds2D().getMinX(), 0);
final Point2D SOUTHPOINTER1_STOP = new Point2D.Double(SOUTHPOINTER1.getBounds2D().getMaxX(), 0);
final float[] SOUTHPOINTER1_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] SOUTHPOINTER1_COLORS = {
new Color(227, 229, 232, 255),
new Color(227, 229, 232, 255),
new Color(171, 177, 184, 255),
new Color(171, 177, 184, 255)
};
final java.awt.LinearGradientPaint SOUTHPOINTER1_GRADIENT = new java.awt.LinearGradientPaint(SOUTHPOINTER1_START, SOUTHPOINTER1_STOP, SOUTHPOINTER1_FRACTIONS, SOUTHPOINTER1_COLORS);
G2.setPaint(SOUTHPOINTER1_GRADIENT);
G2.fill(SOUTHPOINTER1);
final Color STROKE_COLOR_SOUTHPOINTER1 = new Color(0xABB1B8);
G2.setColor(STROKE_COLOR_SOUTHPOINTER1);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(SOUTHPOINTER1);
break;
}
G2.dispose();
return IMAGE;
}
@Override
protected BufferedImage create_POINTER_SHADOW_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, (int) (1.0 * WIDTH), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
final Color SHADOW_COLOR = new Color(0.0f, 0.0f, 0.0f, 0.65f);
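        // Each pointer type gets a matching silhouette below, filled with this translucent black shadow color.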
switch (getPointerType()) {
case TYPE2:
final GeneralPath NORTHPOINTER2 = new GeneralPath();
NORTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER2.moveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.closePath();
final GeneralPath SOUTHPOINTER2 = new GeneralPath();
SOUTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER2.moveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER2);
G2.fill(SOUTHPOINTER2);
break;
case TYPE3:
final GeneralPath NORTHPOINTER3 = new GeneralPath();
NORTHPOINTER3.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER3.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5560747663551402);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER3);
break;
case TYPE1:
default:
final GeneralPath NORTHPOINTER1 = new GeneralPath();
NORTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.514018691588785, IMAGE_HEIGHT * 0.45794392523364486, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.4158878504672897, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.40186915887850466);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5046728971962616, IMAGE_HEIGHT * 0.38317757009345793, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.38317757009345793, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.397196261682243);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.4158878504672897, IMAGE_WIDTH * 0.48598130841121495, IMAGE_HEIGHT * 0.45794392523364486, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5327102803738317);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.closePath();
final GeneralPath SOUTHPOINTER1 = new GeneralPath();
SOUTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.514018691588785, IMAGE_HEIGHT * 0.5420560747663551, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.5841121495327103, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.602803738317757);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5046728971962616, IMAGE_HEIGHT * 0.616822429906542, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.616822429906542, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.602803738317757);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.5841121495327103, IMAGE_WIDTH * 0.48598130841121495, IMAGE_HEIGHT * 0.5420560747663551, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5093457943925234, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.4672897196261682, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4672897196261682);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4672897196261682, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5093457943925234, IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER1);
G2.fill(SOUTHPOINTER1);
break;
}
G2.dispose();
return IMAGE;
}
// </editor-fold>
@Override
public String toString() {
return "Compass";
}
}
| src/main/java/eu/hansolo/steelseries/extras/Compass.java | /*
* Copyright (c) 2012, Gerrit Grunwald
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* The names of its contributors may not be used to endorse or promote
* products derived from this software without specific prior written
* permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package eu.hansolo.steelseries.extras;
import eu.hansolo.steelseries.gauges.AbstractGauge;
import eu.hansolo.steelseries.gauges.AbstractRadial;
import eu.hansolo.steelseries.tools.ColorDef;
import eu.hansolo.steelseries.tools.PostPosition;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.geom.Arc2D;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Path2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import org.pushingpixels.trident.Timeline;
import org.pushingpixels.trident.ease.Spline;
/**
*
* @author hansolo
*/
public final class Compass extends AbstractRadial {
// <editor-fold defaultstate="collapsed" desc="Variable declaration">
private static final double MIN_VALUE = 0;
private static final double MAX_VALUE = 360;
private double value = 0;
private double angleStep = (2 * Math.PI) / (MAX_VALUE - MIN_VALUE);
private final Point2D CENTER = new Point2D.Double();
// Images used to combine layers for background and foreground
private BufferedImage bImage;
private BufferedImage fImage;
private BufferedImage compassRoseImage;
private BufferedImage pointerShadowImage;
private BufferedImage pointerImage;
private BufferedImage disabledImage;
private Timeline timeline = new Timeline(this);
private final Spline EASE = new Spline(0.5f);
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Constructor">
public Compass() {
super();
setPointerColor(ColorDef.RED);
init(getInnerBounds().width, getInnerBounds().height);
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Initialization">
@Override
public AbstractGauge init(final int WIDTH, final int HEIGHT) {
final int GAUGE_WIDTH = isFrameVisible() ? WIDTH : getGaugeBounds().width;
final int GAUGE_HEIGHT = isFrameVisible() ? HEIGHT : getGaugeBounds().height;
if (GAUGE_WIDTH <= 1 || GAUGE_HEIGHT <= 1) {
return this;
}
if (!isFrameVisible()) {
setFramelessOffset(-getGaugeBounds().width * 0.0841121495, -getGaugeBounds().width * 0.0841121495);
} else {
setFramelessOffset(getGaugeBounds().x, getGaugeBounds().y);
}
// Create Background Image
if (bImage != null) {
bImage.flush();
}
bImage = UTIL.createImage(GAUGE_WIDTH, GAUGE_WIDTH, java.awt.Transparency.TRANSLUCENT);
// Create Foreground Image
if (fImage != null) {
fImage.flush();
}
fImage = UTIL.createImage(GAUGE_WIDTH, GAUGE_WIDTH, java.awt.Transparency.TRANSLUCENT);
if (isFrameVisible()) {
switch (getFrameType()) {
case ROUND:
FRAME_FACTORY.createRadialFrame(GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;
case SQUARE:
FRAME_FACTORY.createLinearFrame(GAUGE_WIDTH, GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;
default:
FRAME_FACTORY.createRadialFrame(GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
break;
}
}
if (isBackgroundVisible()) {
create_BACKGROUND_Image(GAUGE_WIDTH, "", "", bImage);
}
if (compassRoseImage != null) {
compassRoseImage.flush();
}
compassRoseImage = create_COMPASS_ROSE_Image(GAUGE_WIDTH);
if (pointerShadowImage != null) {
pointerShadowImage.flush();
}
pointerShadowImage = create_POINTER_SHADOW_Image(GAUGE_WIDTH);
if (pointerImage != null) {
pointerImage.flush();
}
pointerImage = create_POINTER_Image(GAUGE_WIDTH);
createPostsImage(GAUGE_WIDTH, fImage, PostPosition.CENTER);
if (isForegroundVisible()) {
switch (getFrameType()) {
case SQUARE:
FOREGROUND_FACTORY.createLinearForeground(GAUGE_WIDTH, GAUGE_WIDTH, false, bImage);
break;
case ROUND:
default:
FOREGROUND_FACTORY.createRadialForeground(GAUGE_WIDTH, false, getForegroundType(), fImage);
break;
}
}
if (disabledImage != null) {
disabledImage.flush();
}
disabledImage = create_DISABLED_Image(GAUGE_WIDTH);
return this;
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Visualization">
@Override
protected void paintComponent(java.awt.Graphics g) {
if (!isInitialized()) {
return;
}
final Graphics2D G2 = (Graphics2D) g.create();
CENTER.setLocation(getGaugeBounds().getCenterX(), getGaugeBounds().getCenterX());
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
// Translate the coordinate system related to the insets
G2.translate(getFramelessOffset().getX(), getFramelessOffset().getY());
final AffineTransform OLD_TRANSFORM = G2.getTransform();
// Draw combined background image
G2.drawImage(bImage, 0, 0, null);
// Draw compass rose
G2.drawImage(compassRoseImage, 0, 0, null);
// Draw the pointer
G2.rotate((value - MIN_VALUE) * angleStep, CENTER.getX(), CENTER.getY() + 2);
G2.drawImage(pointerShadowImage, 0, 0, null);
G2.setTransform(OLD_TRANSFORM);
G2.rotate((value - MIN_VALUE) * angleStep, CENTER.getX(), CENTER.getY());
G2.drawImage(pointerImage, 0, 0, null);
G2.setTransform(OLD_TRANSFORM);
// Draw combined foreground image
G2.drawImage(fImage, 0, 0, null);
if (!isEnabled()) {
G2.drawImage(disabledImage, 0, 0, null);
}
// Translate the coordinate system back to original
G2.translate(-getInnerBounds().x, -getInnerBounds().y);
G2.dispose();
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Getters and Setters">
@Override
public double getValue() {
return value;
}
/**
* Sets the direction of the needle in degrees (0 - 360°)
* @param VALUE
*/
@Override
public void setValue(final double VALUE) {
if (isEnabled()) {
double oldValue = value;
value = VALUE % 360;
fireStateChanged();
firePropertyChange(VALUE_PROPERTY, oldValue, value);
repaint();
}
}
@Override
public void setValueAnimated(double newValue)
{
if (isEnabled())
{
if (Math.abs(value - newValue) > 180d)//Needle should always take the shortest way to its new position
if (value > newValue)
newValue += 360d;
else
value += 360d;
if (timeline.getState() == Timeline.TimelineState.PLAYING_FORWARD || timeline.getState() == Timeline.TimelineState.PLAYING_REVERSE)
timeline.abort();
timeline = new Timeline(this);
timeline.addPropertyToInterpolate("value", value, newValue);
timeline.setEase(EASE);
timeline.setDuration(this.getStdTimeToValue());
timeline.play();
value = newValue;
}
}
@Override
public double getMinValue() {
return MIN_VALUE;
}
@Override
public double getMaxValue() {
return MAX_VALUE;
}
@Override
public Point2D getCenter() {
return new Point2D.Double(bImage.getWidth() / 2.0 + getInnerBounds().x, bImage.getHeight() / 2.0 + getInnerBounds().y);
}
@Override
public Rectangle2D getBounds2D() {
return new Rectangle2D.Double(bImage.getMinX(), bImage.getMinY(), bImage.getWidth(), bImage.getHeight());
}
@Override
public Rectangle getLcdBounds() {
return new Rectangle();
}
// </editor-fold>
// <editor-fold defaultstate="collapsed" desc="Image related">
private BufferedImage create_BIG_ROSE_POINTER_Image(final int WIDTH) {
final BufferedImage IMAGE = UTIL.createImage((int) (WIDTH * 0.0546875f), (int) (WIDTH * 0.2f), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
G2.setStroke(new BasicStroke(0.75f));
// Define arrow shape of pointer
final GeneralPath POINTER_WHITE_LEFT = new GeneralPath();
final GeneralPath POINTER_WHITE_RIGHT = new GeneralPath();
POINTER_WHITE_LEFT.moveTo(IMAGE_WIDTH - IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, 0);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.closePath();
POINTER_WHITE_RIGHT.moveTo(IMAGE_WIDTH * 0.95f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, 0);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.closePath();
final Area POINTER_FRAME_WHITE = new Area(POINTER_WHITE_LEFT);
POINTER_FRAME_WHITE.add(new Area(POINTER_WHITE_RIGHT));
final Color STROKE_COLOR = getBackgroundColor().SYMBOL_COLOR.darker();
final Color FILL_COLOR = getBackgroundColor().SYMBOL_COLOR;
G2.setColor(STROKE_COLOR);
G2.fill(POINTER_WHITE_RIGHT);
G2.setColor(FILL_COLOR);
G2.fill(POINTER_WHITE_LEFT);
G2.setColor(STROKE_COLOR);
G2.draw(POINTER_FRAME_WHITE);
G2.dispose();
return IMAGE;
}
private BufferedImage create_SMALL_ROSE_POINTER_Image(final int WIDTH) {
final BufferedImage IMAGE = UTIL.createImage((int) (WIDTH * 0.0546875f), (int) (WIDTH * 0.2f), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
G2.setStroke(new BasicStroke(0.75f));
// Define arrow shape of pointer
final GeneralPath POINTER_WHITE_LEFT = new GeneralPath();
final GeneralPath POINTER_WHITE_RIGHT = new GeneralPath();
POINTER_WHITE_LEFT.moveTo(IMAGE_WIDTH - IMAGE_WIDTH * 0.75f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT / 2.0f);
POINTER_WHITE_LEFT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_LEFT.closePath();
POINTER_WHITE_RIGHT.moveTo(IMAGE_WIDTH * 0.75f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT / 2.0f);
POINTER_WHITE_RIGHT.lineTo(IMAGE_WIDTH / 2.0f, IMAGE_HEIGHT);
POINTER_WHITE_RIGHT.closePath();
final Area POINTER_FRAME_WHITE = new Area(POINTER_WHITE_LEFT);
POINTER_FRAME_WHITE.add(new Area(POINTER_WHITE_RIGHT));
final Color STROKE_COLOR = getBackgroundColor().SYMBOL_COLOR.darker();
final Color FILL_COLOR = getBackgroundColor().SYMBOL_COLOR;
G2.setColor(FILL_COLOR);
G2.fill(POINTER_FRAME_WHITE);
G2.setColor(STROKE_COLOR);
G2.draw(POINTER_FRAME_WHITE);
G2.dispose();
return IMAGE;
}
private BufferedImage create_COMPASS_ROSE_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, WIDTH, java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
//G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
//G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
//G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
//G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
//final int IMAGE_HEIGHT = IMAGE.getHeight();
// ******************* COMPASS ROSE *************************************************
final Point2D COMPASS_CENTER = new Point2D.Double(IMAGE_WIDTH / 2.0f, IMAGE_WIDTH / 2.0f);
AffineTransform transform = G2.getTransform();
G2.setStroke(new BasicStroke(IMAGE_WIDTH * 0.01953125f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
for (int i = 0; i <= 360; i += 30) {
G2.draw(new Arc2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.263671875f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.263671875f, IMAGE_WIDTH * 0.52734375f, IMAGE_WIDTH * 0.52734375f, i, 15, Arc2D.OPEN));
}
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(0.5f));
java.awt.Shape outerCircle = new Ellipse2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.2734375f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.2734375f, IMAGE_WIDTH * 0.546875f, IMAGE_WIDTH * 0.546875f);
G2.draw(outerCircle);
java.awt.Shape innerCircle = new Ellipse2D.Double(COMPASS_CENTER.getX() - IMAGE_WIDTH * 0.25390625f, COMPASS_CENTER.getY() - IMAGE_WIDTH * 0.25390625f, IMAGE_WIDTH * 0.5078125f, IMAGE_WIDTH * 0.5078125f);
G2.draw(innerCircle);
final java.awt.geom.Line2D LINE = new java.awt.geom.Line2D.Double(COMPASS_CENTER.getX(), IMAGE_WIDTH * 0.4018691589, COMPASS_CENTER.getX(), IMAGE_WIDTH * 0.1495327103);
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(1f));
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 6, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.rotate(Math.PI / 12, COMPASS_CENTER.getX(), COMPASS_CENTER.getY());
G2.draw(LINE);
G2.setTransform(transform);
final BufferedImage BIG_ROSE_POINTER = create_BIG_ROSE_POINTER_Image(IMAGE_WIDTH);
final BufferedImage SMALL_ROSE_POINTER = create_SMALL_ROSE_POINTER_Image(IMAGE_WIDTH);
final Point2D OFFSET = new Point2D.Double(IMAGE_WIDTH * 0.475f, IMAGE_WIDTH * 0.20f);
G2.translate(OFFSET.getX(), OFFSET.getY());
// N
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// NE
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// E
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// SE
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// S
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// SW
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
// W
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(BIG_ROSE_POINTER, 0, 0, this);
// NW
G2.rotate(Math.PI / 4f, COMPASS_CENTER.getX() - OFFSET.getX(), COMPASS_CENTER.getY() - OFFSET.getY());
G2.drawImage(SMALL_ROSE_POINTER, 0, 0, this);
G2.setTransform(transform);
G2.setColor(getBackgroundColor().SYMBOL_COLOR);
G2.setStroke(new BasicStroke(IMAGE_WIDTH * 0.00953125f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL));
G2.draw(new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.1025f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.1025f), IMAGE_WIDTH * 0.205f, IMAGE_WIDTH * 0.205f));
G2.setStroke(new BasicStroke(0.5f));
G2.setColor(getBackgroundColor().SYMBOL_COLOR.darker());
final java.awt.Shape OUTER_ROSE_ELLIPSE = new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.11f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.11f), IMAGE_WIDTH * 0.22f, IMAGE_WIDTH * 0.22f);
G2.draw(OUTER_ROSE_ELLIPSE);
final java.awt.Shape INNER_ROSE_ELLIPSE = new Ellipse2D.Double(COMPASS_CENTER.getX() - (IMAGE_WIDTH * 0.095f), COMPASS_CENTER.getY() - (IMAGE_WIDTH * 0.095f), IMAGE_WIDTH * 0.19f, IMAGE_WIDTH * 0.19f);
G2.draw(INNER_ROSE_ELLIPSE);
// ******************* TICKMARKS ****************************************************
create_TICKMARKS(G2, IMAGE_WIDTH);
G2.dispose();
return IMAGE;
}
private void create_TICKMARKS(final Graphics2D G2, final int IMAGE_WIDTH) {
// Store former transformation
final AffineTransform FORMER_TRANSFORM = G2.getTransform();
final BasicStroke MEDIUM_STROKE = new BasicStroke(0.005859375f * IMAGE_WIDTH, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
final BasicStroke THIN_STROKE = new BasicStroke(0.00390625f * IMAGE_WIDTH, BasicStroke.CAP_ROUND, BasicStroke.JOIN_BEVEL);
final java.awt.Font BIG_FONT = new java.awt.Font("Serif", java.awt.Font.PLAIN, (int) (0.12f * IMAGE_WIDTH));
final java.awt.Font SMALL_FONT = new java.awt.Font("Serif", java.awt.Font.PLAIN, (int) (0.06f * IMAGE_WIDTH));
final float TEXT_DISTANCE = 0.0750f * IMAGE_WIDTH;
final float MIN_LENGTH = 0.015625f * IMAGE_WIDTH;
final float MED_LENGTH = 0.0234375f * IMAGE_WIDTH;
final float MAX_LENGTH = 0.03125f * IMAGE_WIDTH;
final Color TEXT_COLOR = getBackgroundColor().LABEL_COLOR;
final Color TICK_COLOR = getBackgroundColor().LABEL_COLOR;
// Create the watch itself
final float RADIUS = IMAGE_WIDTH * 0.38f;
final Point2D COMPASS_CENTER = new Point2D.Double(IMAGE_WIDTH / 2.0f, IMAGE_WIDTH / 2.0f);
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
// Draw ticks
Point2D innerPoint;
Point2D outerPoint;
Point2D textPoint = null;
java.awt.geom.Line2D tick;
int tickCounter90 = 0;
int tickCounter15 = 0;
int tickCounter5 = 0;
int counter = 0;
double sinValue = 0;
double cosValue = 0;
final double STEP = (2.0d * Math.PI) / (360.0d);
for (double alpha = 2 * Math.PI; alpha >= 0; alpha -= STEP) {
G2.setStroke(THIN_STROKE);
sinValue = Math.sin(alpha);
cosValue = Math.cos(alpha);
G2.setColor(TICK_COLOR);
if (tickCounter5 == 5) {
G2.setStroke(THIN_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MIN_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MIN_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter5 = 0;
}
// Different tickmark every 15 units
if (tickCounter15 == 15) {
G2.setStroke(THIN_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MED_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MED_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter15 = 0;
tickCounter90 += 15;
}
// Different tickmark every 90 units plus text
if (tickCounter90 == 90) {
G2.setStroke(MEDIUM_STROKE);
innerPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - MAX_LENGTH) * sinValue, COMPASS_CENTER.getY() + (RADIUS - MAX_LENGTH) * cosValue);
outerPoint = new Point2D.Double(COMPASS_CENTER.getX() + RADIUS * sinValue, COMPASS_CENTER.getY() + RADIUS * cosValue);
// Draw ticks
tick = new java.awt.geom.Line2D.Double(innerPoint.getX(), innerPoint.getY(), outerPoint.getX(), outerPoint.getY());
G2.draw(tick);
tickCounter90 = 0;
}
// Draw text
G2.setFont(BIG_FONT);
G2.setColor(TEXT_COLOR);
textPoint = new Point2D.Double(COMPASS_CENTER.getX() + (RADIUS - TEXT_DISTANCE) * sinValue, COMPASS_CENTER.getY() + (RADIUS - TEXT_DISTANCE) * cosValue);
switch (counter) {
case 360:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "S", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 45:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "SW", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 90:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "W", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 135:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "NW", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 180:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "N", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 225:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "NE", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 270:
G2.setFont(BIG_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "E", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
case 315:
G2.setFont(SMALL_FONT);
G2.fill(UTIL.rotateTextAroundCenter(G2, "SE", (int) textPoint.getX(), (int) textPoint.getY(), (Math.PI - alpha)));
break;
}
G2.setTransform(FORMER_TRANSFORM);
tickCounter5++;
tickCounter15++;
counter++;
}
// Restore former transformation
G2.setTransform(FORMER_TRANSFORM);
}
@Override
protected BufferedImage create_POINTER_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, WIDTH, java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
switch (getPointerType()) {
case TYPE2:
final GeneralPath NORTHPOINTER2 = new GeneralPath();
NORTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER2.moveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.closePath();
final Point2D NORTHPOINTER2_START = new Point2D.Double(NORTHPOINTER2.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER2_STOP = new Point2D.Double(NORTHPOINTER2.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER2_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER2_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
final java.awt.LinearGradientPaint NORTHPOINTER2_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER2_START, NORTHPOINTER2_STOP, NORTHPOINTER2_FRACTIONS, NORTHPOINTER2_COLORS);
G2.setPaint(NORTHPOINTER2_GRADIENT);
G2.fill(NORTHPOINTER2);
G2.setColor(getPointerColor().DARK);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(NORTHPOINTER2);
final GeneralPath SOUTHPOINTER2 = new GeneralPath();
SOUTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER2.moveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.closePath();
final Point2D SOUTHPOINTER2_START = new Point2D.Double(SOUTHPOINTER2.getBounds2D().getMinX(), 0);
final Point2D SOUTHPOINTER2_STOP = new Point2D.Double(SOUTHPOINTER2.getBounds2D().getMaxX(), 0);
final float[] SOUTHPOINTER2_FRACTIONS = {
0.0f,
0.48f,
0.48009998f,
1.0f
};
final Color[] SOUTHPOINTER2_COLORS = {
new Color(227, 229, 232, 255),
new Color(227, 229, 232, 255),
new Color(171, 177, 184, 255),
new Color(171, 177, 184, 255)
};
final java.awt.LinearGradientPaint SOUTHPOINTER2_GRADIENT = new java.awt.LinearGradientPaint(SOUTHPOINTER2_START, SOUTHPOINTER2_STOP, SOUTHPOINTER2_FRACTIONS, SOUTHPOINTER2_COLORS);
G2.setPaint(SOUTHPOINTER2_GRADIENT);
G2.fill(SOUTHPOINTER2);
G2.setColor(new Color(0xABB1B8));
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(SOUTHPOINTER2);
break;
case TYPE3:
final GeneralPath NORTHPOINTER3 = new GeneralPath();
NORTHPOINTER3.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER3.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5560747663551402);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.closePath();
final Point2D NORTHPOINTER3_START = new Point2D.Double(NORTHPOINTER3.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER3_STOP = new Point2D.Double(NORTHPOINTER3.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER3_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER3_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
final java.awt.LinearGradientPaint NORTHPOINTER3_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER3_START, NORTHPOINTER3_STOP, NORTHPOINTER3_FRACTIONS, NORTHPOINTER3_COLORS);
G2.setPaint(NORTHPOINTER3_GRADIENT);
G2.fill(NORTHPOINTER3);
break;
case TYPE1:
default:
final GeneralPath NORTHPOINTER1 = new GeneralPath();
NORTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4953271028037383);
NORTHPOINTER1.closePath();
final Point2D NORTHPOINTER1_START = new Point2D.Double(NORTHPOINTER1.getBounds2D().getMinX(), 0);
final Point2D NORTHPOINTER1_STOP = new Point2D.Double(NORTHPOINTER1.getBounds2D().getMaxX(), 0);
final float[] NORTHPOINTER1_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] NORTHPOINTER1_COLORS = {
getPointerColor().LIGHT,
getPointerColor().LIGHT,
getPointerColor().MEDIUM,
getPointerColor().MEDIUM
};
final java.awt.LinearGradientPaint NORTHPOINTER1_GRADIENT = new java.awt.LinearGradientPaint(NORTHPOINTER1_START, NORTHPOINTER1_STOP, NORTHPOINTER1_FRACTIONS, NORTHPOINTER1_COLORS);
G2.setPaint(NORTHPOINTER1_GRADIENT);
G2.fill(NORTHPOINTER1);
G2.setColor(getPointerColor().DARK);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(NORTHPOINTER1);
final GeneralPath SOUTHPOINTER1 = new GeneralPath();
SOUTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.lineTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5046728971962616);
SOUTHPOINTER1.closePath();
final Point2D SOUTHPOINTER1_START = new Point2D.Double(SOUTHPOINTER1.getBounds2D().getMinX(), 0);
final Point2D SOUTHPOINTER1_STOP = new Point2D.Double(SOUTHPOINTER1.getBounds2D().getMaxX(), 0);
final float[] SOUTHPOINTER1_FRACTIONS = {
0.0f,
0.4999f,
0.5f,
1.0f
};
final Color[] SOUTHPOINTER1_COLORS = {
new Color(227, 229, 232, 255),
new Color(227, 229, 232, 255),
new Color(171, 177, 184, 255),
new Color(171, 177, 184, 255)
};
final java.awt.LinearGradientPaint SOUTHPOINTER1_GRADIENT = new java.awt.LinearGradientPaint(SOUTHPOINTER1_START, SOUTHPOINTER1_STOP, SOUTHPOINTER1_FRACTIONS, SOUTHPOINTER1_COLORS);
G2.setPaint(SOUTHPOINTER1_GRADIENT);
G2.fill(SOUTHPOINTER1);
final Color STROKE_COLOR_SOUTHPOINTER1 = new Color(0xABB1B8);
G2.setColor(STROKE_COLOR_SOUTHPOINTER1);
G2.setStroke(new BasicStroke(1.0f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER));
G2.draw(SOUTHPOINTER1);
break;
}
G2.dispose();
return IMAGE;
}
@Override
protected BufferedImage create_POINTER_SHADOW_Image(final int WIDTH) {
if (WIDTH <= 0) {
return null;
}
final BufferedImage IMAGE = UTIL.createImage(WIDTH, (int) (1.0 * WIDTH), java.awt.Transparency.TRANSLUCENT);
final Graphics2D G2 = IMAGE.createGraphics();
G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
final int IMAGE_WIDTH = IMAGE.getWidth();
final int IMAGE_HEIGHT = IMAGE.getHeight();
final Color SHADOW_COLOR = new Color(0.0f, 0.0f, 0.0f, 0.65f);
switch (getPointerType()) {
case TYPE2:
final GeneralPath NORTHPOINTER2 = new GeneralPath();
NORTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER2.moveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.46261682242990654, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.4532710280373832);
NORTHPOINTER2.closePath();
final GeneralPath SOUTHPOINTER2 = new GeneralPath();
SOUTHPOINTER2.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER2.moveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8504672897196262, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5467289719626168, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER2.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4532710280373832, IMAGE_HEIGHT * 0.5373831775700935, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5467289719626168);
SOUTHPOINTER2.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER2);
G2.fill(SOUTHPOINTER2);
break;
case TYPE3:
final GeneralPath NORTHPOINTER3 = new GeneralPath();
NORTHPOINTER3.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER3.moveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.4439252336448598, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5560747663551402);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5560747663551402, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.5);
NORTHPOINTER3.curveTo(IMAGE_WIDTH * 0.5560747663551402, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.14953271028037382);
NORTHPOINTER3.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER3);
break;
case TYPE1:
default:
final GeneralPath NORTHPOINTER1 = new GeneralPath();
NORTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
NORTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.514018691588785, IMAGE_HEIGHT * 0.45794392523364486, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.4158878504672897, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.40186915887850466);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5046728971962616, IMAGE_HEIGHT * 0.38317757009345793, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.1308411214953271, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.38317757009345793, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.397196261682243);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.4158878504672897, IMAGE_WIDTH * 0.48598130841121495, IMAGE_HEIGHT * 0.45794392523364486, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.5327102803738317);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5327102803738317, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5);
NORTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.49065420560747663, IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4719626168224299);
NORTHPOINTER1.closePath();
final GeneralPath SOUTHPOINTER1 = new GeneralPath();
SOUTHPOINTER1.setWindingRule(Path2D.WIND_EVEN_ODD);
SOUTHPOINTER1.moveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.514018691588785, IMAGE_HEIGHT * 0.5420560747663551, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.5841121495327103, IMAGE_WIDTH * 0.5093457943925234, IMAGE_HEIGHT * 0.602803738317757);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5046728971962616, IMAGE_HEIGHT * 0.616822429906542, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.8691588785046729, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.616822429906542, IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.602803738317757);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.49065420560747663, IMAGE_HEIGHT * 0.5841121495327103, IMAGE_WIDTH * 0.48598130841121495, IMAGE_HEIGHT * 0.5420560747663551, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4719626168224299, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5093457943925234, IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.4672897196261682, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.48130841121495327, IMAGE_HEIGHT * 0.4672897196261682, IMAGE_WIDTH * 0.5, IMAGE_HEIGHT * 0.4672897196261682);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.4672897196261682, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.48130841121495327, IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5);
SOUTHPOINTER1.curveTo(IMAGE_WIDTH * 0.5327102803738317, IMAGE_HEIGHT * 0.5093457943925234, IMAGE_WIDTH * 0.5280373831775701, IMAGE_HEIGHT * 0.5186915887850467, IMAGE_WIDTH * 0.5186915887850467, IMAGE_HEIGHT * 0.5280373831775701);
SOUTHPOINTER1.closePath();
G2.setColor(SHADOW_COLOR);
G2.fill(NORTHPOINTER1);
G2.fill(SOUTHPOINTER1);
break;
}
G2.dispose();
return IMAGE;
}
// </editor-fold>
@Override
public String toString() {
return "Compass";
}
}
| Minor edit
Same code was used to implement two clauses of a switch statement: coalesced such two cases into one. | src/main/java/eu/hansolo/steelseries/extras/Compass.java | Minor edit | <ide><path>rc/main/java/eu/hansolo/steelseries/extras/Compass.java
<ide>
<ide> if (isFrameVisible()) {
<ide> switch (getFrameType()) {
<del> case ROUND:
<add> /*case ROUND:
<ide> FRAME_FACTORY.createRadialFrame(GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
<del> break;
<add> break;*/
<ide> case SQUARE:
<ide> FRAME_FACTORY.createLinearFrame(GAUGE_WIDTH, GAUGE_WIDTH, getFrameDesign(), getCustomFrameDesign(), getFrameEffect(), bImage);
<ide> break; |
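The commit message above describes folding two switch clauses that executed identical code into a single clause. A minimal, hypothetical Java sketch of that pattern (the FrameShape and drawFrame names are illustrative only and are not taken from the steelseries sources):

enum FrameShape { ROUND, SQUARE }

final class SwitchCoalescingSketch {
    static void drawFrame(FrameShape shape) {
        switch (shape) {
            case ROUND:   // intentional fall-through: ROUND and SQUARE share the same handling
            case SQUARE:
                System.out.println("create the shared frame");
                break;
        }
    }
}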
|
Java | mit | 6460b60243719101e66bcb12768d6339018226d6 | 0 | blackuy/react-native-twilio-video-webrtc,gaston23/react-native-twilio-video-webrtc,blackuy/react-native-twilio-video-webrtc,blackuy/react-native-twilio-video-webrtc | /**
* Component for Twilio Video local views.
* <p>
* Authors:
* Jonathan Chang <[email protected]>
*/
package com.twiliorn.library;
import android.content.Context;
public class TwilioVideoPreview extends RNVideoViewGroup {
private static final String TAG = "TwilioVideoPreview";
public TwilioVideoPreview(Context context) {
super(context);
CustomTwilioVideoView.registerThumbnailVideoView(this.getSurfaceViewRenderer());
this.getSurfaceViewRenderer().applyZOrder(true);
}
}
| android/src/main/java/com/twiliorn/library/TwilioVideoPreview.java | /**
* Component for Twilio Video local views.
* <p>
* Authors:
* Jonathan Chang <[email protected]>
*/
package com.twiliorn.library;
import android.content.Context;
public class TwilioVideoPreview extends RNVideoViewGroup {
private static final String TAG = "TwilioVideoPreview";
public TwilioVideoPreview(Context context) {
super(context);
CustomTwilioVideoView.registerThumbnailVideoView(this.getSurfaceViewRenderer());
this.getSurfaceViewRenderer().setMirror(true);
this.getSurfaceViewRenderer().applyZOrder(true);
}
}
| "On Android, the front camera appears mirrored" Issues Fixed. (#336)
* test
* test
* test
* test
* test
* [UPDATE]: fix mirroring function
* [FIX] front camera flip (mirroring)
* [FIX] remove line
Co-authored-by: Jonathan Chang <[email protected]> | android/src/main/java/com/twiliorn/library/TwilioVideoPreview.java | "On Android, the front camera appears mirrored" Issues Fixed. (#336) | <ide><path>ndroid/src/main/java/com/twiliorn/library/TwilioVideoPreview.java
<ide> public TwilioVideoPreview(Context context) {
<ide> super(context);
<ide> CustomTwilioVideoView.registerThumbnailVideoView(this.getSurfaceViewRenderer());
<del> this.getSurfaceViewRenderer().setMirror(true);
<ide> this.getSurfaceViewRenderer().applyZOrder(true);
<ide> }
<ide> } |
|
Java | epl-1.0 | b74b2e17299dea6e7974c0880dc14af703d80e9b | 0 | agoncal/core,agoncal/core,agoncal/core,pplatek/core,stalep/forge-core,D9110/core,agoncal/core,agoncal/core,pplatek/core,forge/core,forge/core,agoncal/core,agoncal/core,pplatek/core,oscerd/core,pplatek/core,stalep/forge-core,ivannov/core,oscerd/core,ivannov/core,D9110/core,jerr/jbossforge-core,jerr/jbossforge-core,ivannov/core,jerr/jbossforge-core,jerr/jbossforge-core,jerr/jbossforge-core,forge/core,oscerd/core,agoncal/core,forge/core,forge/core,D9110/core,pplatek/core,oscerd/core,D9110/core,ivannov/core,forge/core,jerr/jbossforge-core,D9110/core,ivannov/core,D9110/core,forge/core,ivannov/core,D9110/core,pplatek/core,jerr/jbossforge-core,D9110/core,ivannov/core,oscerd/core,oscerd/core,pplatek/core,forge/core,D9110/core,oscerd/core,oscerd/core,jerr/jbossforge-core,jerr/jbossforge-core,ivannov/core,ivannov/core,ivannov/core,pplatek/core,oscerd/core,pplatek/core,agoncal/core,D9110/core,forge/core,oscerd/core,pplatek/core,forge/core,jerr/jbossforge-core,agoncal/core | /**
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.addon.ui.impl.controller;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.forge.addon.ui.UIRuntime;
import org.jboss.forge.addon.ui.command.CommandExecutionListener;
import org.jboss.forge.addon.ui.command.UICommand;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.controller.CommandController;
import org.jboss.forge.addon.ui.controller.CommandControllerFactory;
import org.jboss.forge.addon.ui.controller.WizardCommandController;
import org.jboss.forge.addon.ui.impl.context.UIExecutionContextImpl;
import org.jboss.forge.addon.ui.impl.context.UINavigationContextImpl;
import org.jboss.forge.addon.ui.input.InputComponent;
import org.jboss.forge.addon.ui.input.UIPrompt;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.output.UIMessage;
import org.jboss.forge.addon.ui.progress.UIProgressMonitor;
import org.jboss.forge.addon.ui.result.CompositeResult;
import org.jboss.forge.addon.ui.result.NavigationResult;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.wizard.UIWizard;
import org.jboss.forge.furnace.addons.AddonRegistry;
import org.jboss.forge.furnace.proxy.Proxies;
/**
*
* Implementation for the {@link WizardCommandController} interface
*
* @author <a href="[email protected]">George Gastaldi</a>
*/
class WizardCommandControllerImpl extends AbstractCommandController implements WizardCommandController
{
private final Logger logger = Logger.getLogger(getClass().getName());
/**
* The execution flow
*/
private final List<WizardStepEntry> flow = new ArrayList<>();
/**
* If there are any subflows, store here
*/
private final LinkedList<WizardStepEntry> subflow = new LinkedList<>();
/**
* The pointer that this flow is on. Starts with 0
*/
private int flowPointer = 0;
private final CommandControllerFactory controllerFactory;
public WizardCommandControllerImpl(UIContext context, AddonRegistry addonRegistry, UIRuntime runtime,
UIWizard initialCommand, CommandControllerFactory controllerFactory)
{
super(addonRegistry, runtime, initialCommand, context);
this.controllerFactory = controllerFactory;
flow.add(createEntry(initialCommand, false));
}
@Override
public void initialize() throws Exception
{
getCurrentController().initialize();
}
@Override
public boolean isInitialized()
{
return getCurrentController().isInitialized();
}
@Override
public Result execute() throws Exception
{
assertInitialized();
UIProgressMonitor progressMonitor = runtime.createProgressMonitor(context);
UIPrompt prompt = runtime.createPrompt(context);
UIExecutionContextImpl executionContext = new UIExecutionContextImpl(context, progressMonitor, prompt);
Set<CommandExecutionListener> listeners = new LinkedHashSet<>();
listeners.addAll(context.getListeners());
for (CommandExecutionListener listener : addonRegistry
.getServices(CommandExecutionListener.class))
{
listeners.add(listener);
}
assertValid();
List<Result> results = new LinkedList<>();
for (WizardStepEntry entry : flow)
{
CommandController controller = entry.controller;
if (progressMonitor.isCancelled())
{
break;
}
UICommand command = controller.getCommand();
try
{
for (CommandExecutionListener listener : listeners)
{
listener.preCommandExecuted(command, executionContext);
}
Result currentResult = command.execute(executionContext);
for (CommandExecutionListener listener : listeners)
{
listener.postCommandExecuted(command, executionContext, currentResult);
}
results.add(currentResult);
}
catch (Exception e)
{
for (CommandExecutionListener listener : listeners)
{
listener.postCommandFailure(command, executionContext, e);
}
throw e;
}
}
return new CompositeResult(results);
}
@Override
public List<UIMessage> validate()
{
return getCurrentController().validate();
}
@Override
public boolean isValid()
{
return getCurrentController().isValid();
}
@Override
public CommandController setValueFor(String inputName, Object value) throws IllegalArgumentException
{
getCurrentController().setValueFor(inputName, value);
return this;
}
@Override
public Object getValueFor(String inputName) throws IllegalArgumentException
{
return getCurrentController().getValueFor(inputName);
}
@Override
public Map<String, InputComponent<?, ?>> getInputs()
{
return getCurrentController().getInputs();
}
@Override
public UICommandMetadata getMetadata()
{
return getCurrentController().getMetadata();
}
@Override
public UICommandMetadata getInitialMetadata()
{
return flow.get(0).controller.getMetadata();
}
@Override
public boolean isEnabled()
{
return getCurrentController().isEnabled();
}
@Override
public UICommand getCommand()
{
return getCurrentController().getCommand();
}
@Override
public void close() throws Exception
{
context.close();
}
@Override
public boolean canMoveToNextStep()
{
assertInitialized();
Class<? extends UICommand>[] next = getNextFrom(getCurrentController().getCommand());
return isValid() && (next != null || !subflow.isEmpty());
}
@Override
public boolean canMoveToPreviousStep()
{
assertInitialized();
return flowPointer > 0;
}
@Override
public boolean canExecute()
{
assertInitialized();
for (WizardStepEntry entry : flow)
{
if (!entry.controller.canExecute())
{
return false;
}
}
// Checking if there is any next page left
CommandController lastController = flow.get(flow.size() - 1).controller;
if (lastController.isInitialized())
{
Class<? extends UICommand>[] next = getNextFrom(flow.get(flow.size() - 1).controller.getCommand());
if (next != null || !subflow.isEmpty())
{
return false;
}
}
else
{
return false;
}
return true;
}
@Override
public WizardCommandController next() throws Exception
{
assertInitialized();
assertValid();
WizardStepEntry currentEntry = getCurrentEntry();
WizardStepEntry nextEntry = getNextEntry();
Class<? extends UICommand>[] result = getNextFrom(currentEntry.controller.getCommand());
if (nextEntry == null)
{
currentEntry.next = result;
addNextFlowStep(result);
}
else
{
         // There is already a next page; did the object returned from UICommand.next() change?
if (!Arrays.equals(currentEntry.next, result))
{
// Update current entry
currentEntry.next = result;
cleanSubsequentStalePages();
addNextFlowStep(result);
}
else
{
// FORGE-1372- Test if the inputs changed.
final UICommand command;
if (result == null)
{
if (subflow.isEmpty())
{
command = null;
}
else
{
UICommand command2 = Proxies.unwrap(subflow.peek().controller.getCommand());
command = createCommand(command2.getClass());
}
}
else
{
command = createCommand(result[0]);
}
if (command != null)
{
CommandController ctrl = controllerFactory.createController(context, runtime, command);
ctrl.initialize();
Set<String> currentInputsKeySet = nextEntry.controller.getInputs().keySet();
Set<String> keySet = ctrl.getInputs().keySet();
if (!(currentInputsKeySet.containsAll(keySet) && keySet.containsAll(currentInputsKeySet)))
{
cleanSubsequentStalePages();
addNextFlowStep(result);
}
}
}
}
flowPointer++;
return this;
}
   /**
    * Removes the stale pages that follow the current flow pointer, pushing any subflow head entries back onto the
    * subflow queue so they can be offered again.
    */
private void cleanSubsequentStalePages()
{
// Remove subsequent pages and push the subflows back to the stack
Iterator<WizardStepEntry> it = flow.listIterator(flowPointer + 1);
int subflowIdx = 0;
while (it.hasNext())
{
WizardStepEntry entry = it.next();
if (entry.subflowHead && !subflow.contains(entry))
{
subflow.add(subflowIdx++, entry);
}
it.remove();
}
}
   /**
    * Appends the next step to the flow.
    *
    * @param result the next command classes returned by the current step, or {@code null} to take the next entry
    *           from the subflow queue
    */
private void addNextFlowStep(Class<? extends UICommand>[] result)
{
final WizardStepEntry next;
if (result == null)
{
if (subflow.isEmpty())
{
throw new IllegalStateException("No next step found");
}
else
{
next = subflow.pop();
}
}
else
{
next = createEntry(result[0], false);
for (int i = 1; i < result.length; i++)
{
// Save this subflow for later
WizardStepEntry subflowEntry = createEntry(result[i], true);
if (!subflow.contains(subflowEntry))
{
subflow.add(subflowEntry);
}
}
}
flow.add(next);
}
@Override
public WizardCommandController previous() throws IllegalStateException
{
assertInitialized();
if (!canMoveToPreviousStep())
{
throw new IllegalStateException("No previous step found");
}
flowPointer--;
return this;
}
private WizardStepEntry getCurrentEntry()
{
return flow.get(flowPointer);
}
private WizardStepEntry getNextEntry()
{
int nextIdx = flowPointer + 1;
return (nextIdx < flow.size()) ? flow.get(nextIdx) : null;
}
private CommandController getCurrentController()
{
return getCurrentEntry().controller;
}
private WizardStepEntry createEntry(Class<? extends UICommand> commandClass, boolean subflowHead)
{
UICommand command = createCommand(commandClass);
return createEntry(command, subflowHead);
}
private UICommand createCommand(Class<? extends UICommand> commandClass)
{
UICommand command = addonRegistry.getServices(commandClass).get();
return command;
}
private WizardStepEntry createEntry(UICommand command, boolean subflowHead)
{
CommandController controller = controllerFactory.createSingleController(context, runtime, command);
return new WizardStepEntry(controller, subflowHead);
}
private Class<? extends UICommand>[] getNextFrom(UICommand command)
{
Class<? extends UICommand>[] result = null;
if (command instanceof UIWizard)
{
NavigationResult next;
try
{
next = ((UIWizard) command).next(new UINavigationContextImpl(context));
}
catch (Exception e)
{
logger.log(Level.SEVERE, "Cannot fetch the next steps from " + command, e);
next = null;
}
if (next != null)
{
result = next.getNext();
}
}
return result;
}
private static class WizardStepEntry
{
final CommandController controller;
Class<? extends UICommand>[] next;
// If this entry starts a subflow
final boolean subflowHead;
public WizardStepEntry(CommandController controller, boolean subflowHead)
{
this.controller = controller;
this.subflowHead = subflowHead;
}
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + ((controller == null) ? 0 : controller.hashCode());
result = prime * result + Arrays.hashCode(next);
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
WizardStepEntry other = (WizardStepEntry) obj;
if (controller == null)
{
if (other.controller != null)
return false;
}
else if (!controller.equals(other.controller))
return false;
if (!Arrays.equals(next, other.next))
return false;
return true;
}
}
} | ui/impl/src/main/java/org/jboss/forge/addon/ui/impl/controller/WizardCommandControllerImpl.java | /**
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.addon.ui.impl.controller;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.forge.addon.ui.UIRuntime;
import org.jboss.forge.addon.ui.command.CommandExecutionListener;
import org.jboss.forge.addon.ui.command.UICommand;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.controller.CommandController;
import org.jboss.forge.addon.ui.controller.CommandControllerFactory;
import org.jboss.forge.addon.ui.controller.WizardCommandController;
import org.jboss.forge.addon.ui.impl.context.UIExecutionContextImpl;
import org.jboss.forge.addon.ui.impl.context.UINavigationContextImpl;
import org.jboss.forge.addon.ui.input.InputComponent;
import org.jboss.forge.addon.ui.input.UIPrompt;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.output.UIMessage;
import org.jboss.forge.addon.ui.progress.UIProgressMonitor;
import org.jboss.forge.addon.ui.result.CompositeResult;
import org.jboss.forge.addon.ui.result.NavigationResult;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.wizard.UIWizard;
import org.jboss.forge.furnace.addons.AddonRegistry;
import org.jboss.forge.furnace.proxy.Proxies;
/**
*
* Implementation for the {@link WizardCommandController} interface
*
* @author <a href="[email protected]">George Gastaldi</a>
*/
class WizardCommandControllerImpl extends AbstractCommandController implements WizardCommandController
{
private final Logger logger = Logger.getLogger(getClass().getName());
/**
* The execution flow
*/
private final List<WizardStepEntry> flow = new ArrayList<>();
/**
* If there are any subflows, store here
*/
private final LinkedList<WizardStepEntry> subflow = new LinkedList<>();
/**
* The pointer that this flow is on. Starts with 0
*/
private int flowPointer = 0;
private final CommandControllerFactory controllerFactory;
public WizardCommandControllerImpl(UIContext context, AddonRegistry addonRegistry, UIRuntime runtime,
UIWizard initialCommand, CommandControllerFactory controllerFactory)
{
super(addonRegistry, runtime, initialCommand, context);
this.controllerFactory = controllerFactory;
flow.add(createEntry(initialCommand, false));
}
@Override
public void initialize() throws Exception
{
getCurrentController().initialize();
}
@Override
public boolean isInitialized()
{
return getCurrentController().isInitialized();
}
@Override
public Result execute() throws Exception
{
assertInitialized();
UIProgressMonitor progressMonitor = runtime.createProgressMonitor(context);
UIPrompt prompt = runtime.createPrompt(context);
UIExecutionContextImpl executionContext = new UIExecutionContextImpl(context, progressMonitor, prompt);
Set<CommandExecutionListener> listeners = new LinkedHashSet<>();
listeners.addAll(context.getListeners());
for (CommandExecutionListener listener : addonRegistry
.getServices(CommandExecutionListener.class))
{
listeners.add(listener);
}
assertValid();
List<Result> results = new LinkedList<>();
for (WizardStepEntry entry : flow)
{
CommandController controller = entry.controller;
if (progressMonitor.isCancelled())
{
break;
}
UICommand command = controller.getCommand();
try
{
for (CommandExecutionListener listener : listeners)
{
listener.preCommandExecuted(command, executionContext);
}
Result currentResult = command.execute(executionContext);
for (CommandExecutionListener listener : listeners)
{
listener.postCommandExecuted(command, executionContext, currentResult);
}
results.add(currentResult);
}
catch (Exception e)
{
for (CommandExecutionListener listener : listeners)
{
listener.postCommandFailure(command, executionContext, e);
}
throw e;
}
}
return new CompositeResult(results);
}
@Override
public List<UIMessage> validate()
{
return getCurrentController().validate();
}
@Override
public boolean isValid()
{
return getCurrentController().isValid();
}
@Override
public CommandController setValueFor(String inputName, Object value) throws IllegalArgumentException
{
getCurrentController().setValueFor(inputName, value);
return this;
}
@Override
public Object getValueFor(String inputName) throws IllegalArgumentException
{
return getCurrentController().getValueFor(inputName);
}
@Override
public Map<String, InputComponent<?, ?>> getInputs()
{
return getCurrentController().getInputs();
}
@Override
public UICommandMetadata getMetadata()
{
return getCurrentController().getMetadata();
}
@Override
public UICommandMetadata getInitialMetadata()
{
return flow.get(0).controller.getMetadata();
}
@Override
public boolean isEnabled()
{
return getCurrentController().isEnabled();
}
@Override
public UICommand getCommand()
{
return getCurrentController().getCommand();
}
@Override
public void close() throws Exception
{
context.close();
}
@Override
public boolean canMoveToNextStep()
{
assertInitialized();
Class<? extends UICommand>[] next = getNextFrom(getCurrentController().getCommand());
return (isValid() && (getNextEntry() != null || (next != null || !subflow.isEmpty())));
}
@Override
public boolean canMoveToPreviousStep()
{
assertInitialized();
return flowPointer > 0;
}
@Override
public boolean canExecute()
{
assertInitialized();
for (WizardStepEntry entry : flow)
{
if (!entry.controller.canExecute())
{
return false;
}
}
// Checking if there is any next page left
CommandController lastController = flow.get(flow.size() - 1).controller;
if (lastController.isInitialized())
{
Class<? extends UICommand>[] next = getNextFrom(flow.get(flow.size() - 1).controller.getCommand());
if (next != null || !subflow.isEmpty())
{
return false;
}
}
else
{
return false;
}
return true;
}
@Override
public WizardCommandController next() throws Exception
{
assertInitialized();
assertValid();
WizardStepEntry currentEntry = getCurrentEntry();
WizardStepEntry nextEntry = getNextEntry();
Class<? extends UICommand>[] result = getNextFrom(currentEntry.controller.getCommand());
if (nextEntry == null)
{
currentEntry.next = result;
addNextFlowStep(result);
}
else
{
         // There is already a next page; did the object returned from UICommand.next() change?
if (!Arrays.equals(currentEntry.next, result))
{
// Update current entry
currentEntry.next = result;
cleanSubsequentStalePages();
addNextFlowStep(result);
}
else
{
// FORGE-1372- Test if the inputs changed.
final UICommand command;
if (result == null)
{
if (subflow.isEmpty())
{
command = null;
}
else
{
UICommand command2 = Proxies.unwrap(subflow.peek().controller.getCommand());
command = createCommand(command2.getClass());
}
}
else
{
command = createCommand(result[0]);
}
if (command != null)
{
CommandController ctrl = controllerFactory.createController(context, runtime, command);
ctrl.initialize();
Set<String> currentInputsKeySet = nextEntry.controller.getInputs().keySet();
Set<String> keySet = ctrl.getInputs().keySet();
if (!(currentInputsKeySet.containsAll(keySet) && keySet.containsAll(currentInputsKeySet)))
{
cleanSubsequentStalePages();
addNextFlowStep(result);
}
}
}
}
flowPointer++;
return this;
}
   /**
    * Removes the stale pages that follow the current flow pointer, pushing any subflow head entries back onto the
    * subflow queue so they can be offered again.
    */
private void cleanSubsequentStalePages()
{
// Remove subsequent pages and push the subflows back to the stack
Iterator<WizardStepEntry> it = flow.listIterator(flowPointer + 1);
int subflowIdx = 0;
while (it.hasNext())
{
WizardStepEntry entry = it.next();
if (entry.subflowHead && !subflow.contains(entry))
{
subflow.add(subflowIdx++, entry);
}
it.remove();
}
}
   /**
    * Appends the next step to the flow.
    *
    * @param result the next command classes returned by the current step, or {@code null} to take the next entry
    *           from the subflow queue
    */
private void addNextFlowStep(Class<? extends UICommand>[] result)
{
final WizardStepEntry next;
if (result == null)
{
if (subflow.isEmpty())
{
throw new IllegalStateException("No next step found");
}
else
{
next = subflow.pop();
}
}
else
{
next = createEntry(result[0], false);
for (int i = 1; i < result.length; i++)
{
// Save this subflow for later
WizardStepEntry subflowEntry = createEntry(result[i], true);
if (!subflow.contains(subflowEntry))
{
subflow.add(subflowEntry);
}
}
}
flow.add(next);
}
@Override
public WizardCommandController previous() throws IllegalStateException
{
assertInitialized();
if (!canMoveToPreviousStep())
{
throw new IllegalStateException("No previous step found");
}
flowPointer--;
return this;
}
private WizardStepEntry getCurrentEntry()
{
return flow.get(flowPointer);
}
private WizardStepEntry getNextEntry()
{
int nextIdx = flowPointer + 1;
return (nextIdx < flow.size()) ? flow.get(nextIdx) : null;
}
private CommandController getCurrentController()
{
return getCurrentEntry().controller;
}
private WizardStepEntry createEntry(Class<? extends UICommand> commandClass, boolean subflowHead)
{
UICommand command = createCommand(commandClass);
return createEntry(command, subflowHead);
}
private UICommand createCommand(Class<? extends UICommand> commandClass)
{
UICommand command = addonRegistry.getServices(commandClass).get();
return command;
}
private WizardStepEntry createEntry(UICommand command, boolean subflowHead)
{
CommandController controller = controllerFactory.createSingleController(context, runtime, command);
return new WizardStepEntry(controller, subflowHead);
}
private Class<? extends UICommand>[] getNextFrom(UICommand command)
{
Class<? extends UICommand>[] result = null;
if (command instanceof UIWizard)
{
NavigationResult next;
try
{
next = ((UIWizard) command).next(new UINavigationContextImpl(context));
}
catch (Exception e)
{
logger.log(Level.SEVERE, "Cannot fetch the next steps from " + command, e);
next = null;
}
if (next != null)
{
result = next.getNext();
}
}
return result;
}
private static class WizardStepEntry
{
final CommandController controller;
Class<? extends UICommand>[] next;
// If this entry starts a subflow
final boolean subflowHead;
public WizardStepEntry(CommandController controller, boolean subflowHead)
{
this.controller = controller;
this.subflowHead = subflowHead;
}
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + ((controller == null) ? 0 : controller.hashCode());
result = prime * result + Arrays.hashCode(next);
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
WizardStepEntry other = (WizardStepEntry) obj;
if (controller == null)
{
if (other.controller != null)
return false;
}
else if (!controller.equals(other.controller))
return false;
if (!Arrays.equals(next, other.next))
return false;
return true;
}
}
} | FORGE-1459: Fixed canMoveToNextStep implementation | ui/impl/src/main/java/org/jboss/forge/addon/ui/impl/controller/WizardCommandControllerImpl.java | FORGE-1459: Fixed canMoveToNextStep implementation | <ide><path>i/impl/src/main/java/org/jboss/forge/addon/ui/impl/controller/WizardCommandControllerImpl.java
<ide> {
<ide> assertInitialized();
<ide> Class<? extends UICommand>[] next = getNextFrom(getCurrentController().getCommand());
<del> return (isValid() && (getNextEntry() != null || (next != null || !subflow.isEmpty())));
<add> return isValid() && (next != null || !subflow.isEmpty());
<ide> }
<ide>
<ide> @Override |
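The FORGE-1459 fix above narrows canMoveToNextStep(): the old condition also returned true whenever a previously visited page was still cached in the flow (getNextEntry() != null), even if the current command no longer reported a next step. Restated side by side, using only the identifiers that appear in the diff:

      // Before: a cached next entry alone was enough to enable "Next".
      boolean before = isValid() && (getNextEntry() != null || next != null || !subflow.isEmpty());
      // After: "Next" is enabled only when the current command supplies further steps or a subflow is queued.
      boolean after = isValid() && (next != null || !subflow.isEmpty());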
|
Java | mit | 05c9e01c3e3817e83cb24450bbe3cb35cf500e0a | 0 | PLOS/wombat,PLOS/wombat,PLOS/wombat,PLOS/wombat | /*
* $HeadURL$
* $Id$
* Copyright (c) 2006-2013 by Public Library of Science http://plos.org http://ambraproject.org
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ambraproject.wombat.controller;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import org.ambraproject.wombat.config.site.Site;
import org.ambraproject.wombat.config.site.SiteParam;
import org.ambraproject.wombat.config.site.SiteSet;
import org.ambraproject.wombat.feed.ArticleFeedView;
import org.ambraproject.wombat.feed.FeedMetadataField;
import org.ambraproject.wombat.feed.FeedType;
import org.ambraproject.wombat.model.JournalFilterType;
import org.ambraproject.wombat.model.SearchFilter;
import org.ambraproject.wombat.model.SearchFilterItem;
import org.ambraproject.wombat.model.SingletonSearchFilterType;
import org.ambraproject.wombat.model.TaxonomyGraph;
import org.ambraproject.wombat.service.BrowseTaxonomyService;
import org.ambraproject.wombat.service.SolrArticleAdapter;
import org.ambraproject.wombat.service.remote.ArticleSearchQuery;
import org.ambraproject.wombat.service.remote.SearchFilterService;
import org.ambraproject.wombat.service.remote.ServiceRequestException;
import org.ambraproject.wombat.service.remote.SolrSearchService;
import org.ambraproject.wombat.service.remote.SolrSearchServiceImpl;
import org.ambraproject.wombat.util.ListUtil;
import org.ambraproject.wombat.util.UrlParamBuilder;
import org.apache.commons.lang.WordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Controller class for user-initiated searches.
*/
@Controller
public class SearchController extends WombatController {
private static final Logger log = LoggerFactory.getLogger(SearchController.class);
@Autowired
private SiteSet siteSet;
@Autowired
private SolrSearchService solrSearchService;
@Autowired
private SearchFilterService searchFilterService;
@Autowired
private BrowseTaxonomyService browseTaxonomyService;
@Autowired
private ArticleFeedView articleFeedView;
private final String BROWSE_RESULTS_PER_PAGE = "13";
/**
* Class that encapsulates the parameters that are shared across many different search types. For example, a subject
* search and an advanced search will have many parameters in common, such as sort order, date range, page, results
* per page, etc. This class eliminates the need to have long lists of @RequestParam parameters duplicated across
* many controller methods.
* <p>
* This class also contains logic having to do with which parameters take precedence over others, defaults when
* parameters are absent, and the like.
*/
@VisibleForTesting
static final class CommonParams {
private enum AdvancedSearchTerms {
EVERYTHING("everything:"),
TITLE("title:"),
AUTHOR("author:"),
BODY("body:"),
ABSTRACT("abstract:"),
SUBJECT("subject:"),
PUBLICATION_DATE("publication_date:"),
ACCEPTED_DATE("accepted_date:"),
ID("id:"),
ARTICLE_TYPE("article_type:"),
AUTHOR_AFFILIATE("author_affiliate:"),
COMPETING_INTEREST("competing_interest:"),
CONCLUSIONS("conclusions:"),
EDITOR("editor:"),
ELOCATION_ID("elocation_id:"),
FIGURE_TABLE_CAPTION("figure_table_caption:"),
FINANCIAL_DISCLOSURE("financial_disclosure:"),
INTRODUCTION("introduction:"),
ISSUE("issue:"),
MATERIALS_AND_METHODS("materials_and_methods:"),
RECEIVED_DATE("received_date:"),
REFERENCE("reference:"),
RESULTS_AND_DISCUSSION("results_and_discussion:"),
SUPPORTING_INFORMATION("supporting_information:"),
TRIAL_REGISTRATION("trial_registration:"),
VOLUME("volume:");
private final String text;
private AdvancedSearchTerms(final String text) {
this.text = text;
}
@Override
public String toString() {
return text;
}
}
/**
* The number of the first desired result (zero-based) that will be passed to solr. Calculated from the page and
* resultsPerPage URL parameters.
*/
int start;
SolrSearchServiceImpl.SolrSortOrder sortOrder;
SolrSearchService.SearchCriterion dateRange;
List<String> articleTypes;
List<String> journalKeys;
@VisibleForTesting
Set<String> filterJournalNames;
@VisibleForTesting
List<String> subjectList;
List<String> authors;
List<String> sections;
/**
* Indicates whether any filter parameters are being applied to the search (journal, subject area, etc).
*/
@VisibleForTesting
boolean isFiltered;
private SiteSet siteSet;
private Site site;
private int resultsPerPage;
private String startDate;
private String endDate;
private final String DEFAULT_START_DATE = "2003-01-01";
// doesn't include journal and date filter param names
static final Set<String> FILTER_PARAMETER_NAMES = Stream.of(SingletonSearchFilterType.values()).map
(SingletonSearchFilterType::getParameterName).collect(Collectors.toSet());
/**
* Constructor.
*
* @param siteSet siteSet associated with the request
* @param site site of the request
*/
CommonParams(SiteSet siteSet, Site site) {
this.siteSet = siteSet;
this.site = site;
}
/**
     * Extracts parameters from the raw parameter map, applying the rules that decide which parameters take
     * precedence and which defaults are used when parameters are absent.
     *
     * @param params the raw request parameter map, keyed by parameter name
     * @throws IOException
*/
void parseParams(Map<String, List<String>> params) throws IOException {
String pageParam = getSingleParam(params, "page", null);
resultsPerPage = Integer.parseInt(getSingleParam(params, "resultsPerPage", "15"));
if (pageParam != null) {
int page = Integer.parseInt(pageParam);
start = (page - 1) * resultsPerPage;
}
sortOrder = SolrSearchServiceImpl.SolrSortOrder.RELEVANCE;
String sortOrderParam = getSingleParam(params, "sortOrder", null);
if (!Strings.isNullOrEmpty(sortOrderParam)) {
sortOrder = SolrSearchServiceImpl.SolrSortOrder.valueOf(sortOrderParam);
}
dateRange = parseDateRange(getSingleParam(params, "dateRange", null),
getSingleParam(params, "filterStartDate", null), getSingleParam(params, "filterEndDate", null));
journalKeys = ListUtil.isNullOrEmpty(params.get("filterJournals"))
? new ArrayList<String>() : params.get("filterJournals");
filterJournalNames = new HashSet<>();
for (String journalKey : journalKeys) {
filterJournalNames.add(siteSet.getJournalNameFromKey(journalKey));
}
startDate = getSingleParam(params, "filterStartDate", null);
endDate = getSingleParam(params, "filterEndDate", null);
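      // If only one end of the date range was supplied, fill in the missing end with a default value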
if (startDate == null && endDate != null) {
startDate = DEFAULT_START_DATE;
} else if (startDate != null && endDate == null) {
endDate = new SimpleDateFormat("yyyy-MM-dd").format(Calendar.getInstance().getTime());
}
subjectList = parseSubjects(getSingleParam(params, "subject", null), params.get("filterSubjects"));
articleTypes = params.get("filterArticleTypes");
articleTypes = articleTypes == null ? new ArrayList<String>() : articleTypes;
authors = ListUtil.isNullOrEmpty(params.get("filterAuthors"))
? new ArrayList<String>() : params.get("filterAuthors");
sections = ListUtil.isNullOrEmpty(params.get("filterSections"))
? new ArrayList<String>() : params.get("filterSections");
isFiltered = !filterJournalNames.isEmpty() || !subjectList.isEmpty() || !articleTypes.isEmpty()
|| dateRange != SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME || !authors.isEmpty()
|| startDate != null || endDate != null || !sections.isEmpty();
}
/**
* Adds parameters (and derived values) back to the model needed for results page rendering. This only adds model
* attributes that are shared amongst different types of searches; it is the caller's responsibility to add the
* search results and any other data needed.
*
* @param model model that will be passed to the template
* @param request HttpServletRequest
*/
void addToModel(Model model, HttpServletRequest request) {
model.addAttribute("resultsPerPage", resultsPerPage);
model.addAttribute("filterJournalNames", filterJournalNames);
// TODO: split or share model assignments between mobile and desktop.
model.addAttribute("filterJournals", journalKeys);
model.addAttribute("filterStartDate", startDate);
model.addAttribute("filterEndDate", endDate);
model.addAttribute("filterSubjects", subjectList);
model.addAttribute("filterArticleTypes", articleTypes);
model.addAttribute("filterAuthors", authors);
model.addAttribute("filterSections", sections);
// TODO: bind sticky form params using Spring MVC support for Freemarker. I think we have to add
// some more dependencies to do this. See
// http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/view.html#view-velocity
model.addAttribute("selectedSortOrder", sortOrder);
model.addAttribute("selectedDateRange", dateRange);
model.addAttribute("selectedResultsPerPage", resultsPerPage);
model.addAttribute("isFiltered", isFiltered);
// We pass in the request parameters here, because they are needed by paging.ftl.
// The normal way to get request parameters from a freemarker template is to use the
// RequestParameters variable, but due to a bug in freemarker, this does not handle
// multi-valued parameters correctly. See http://sourceforge.net/p/freemarker/bugs/324/
Map<String, String[]> parameterMap = request.getParameterMap();
model.addAttribute("parameterMap", parameterMap);
Map<String, String[]> clearDateFilterParams = new HashMap<>();
clearDateFilterParams.putAll(parameterMap);
clearDateFilterParams.remove("filterStartDate");
clearDateFilterParams.remove("filterEndDate");
model.addAttribute("dateClearParams", clearDateFilterParams);
Map<String, String[]> clearAllFilterParams = new HashMap<>();
clearAllFilterParams.putAll(clearDateFilterParams);
clearAllFilterParams.remove("filterJournals");
clearAllFilterParams.remove("filterSubjects");
clearAllFilterParams.remove("filterAuthors");
clearAllFilterParams.remove("filterSections");
clearAllFilterParams.remove("filterArticleTypes");
model.addAttribute("clearAllFilterParams", clearAllFilterParams);
}
private String getSingleParam(Map<String, List<String>> params, String key, String defaultValue) {
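      // Returns the first value mapped to the key, treating a missing, null, or empty first
      // value as absent and substituting the supplied default.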
List<String> values = params.get(key);
return values == null || values.isEmpty() ? defaultValue
: values.get(0) == null || values.get(0).isEmpty() ? defaultValue : values.get(0);
}
/**
* Determines which publication dates to filter by in the search. If no dates are input, a default date range of All
* Time will be used. Mobile search only provides the enumerated dateRangeParam field, while desktop search provides
* explicit fields for start and end dates. The parameters are mutually exclusive.
*
* @param dateRangeParam mobile date range enumeration value
* @param startDate desktop start date value
* @param endDate desktop end date value
     * @return a generic {@link SolrSearchService.SearchCriterion} object used to query Solr
*/
private SolrSearchService.SearchCriterion parseDateRange(String dateRangeParam, String startDate, String endDate) {
SolrSearchService.SearchCriterion dateRange = SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME;
if (!Strings.isNullOrEmpty(dateRangeParam)) {
dateRange = SolrSearchServiceImpl.SolrEnumeratedDateRange.valueOf(dateRangeParam);
} else if (!Strings.isNullOrEmpty(startDate) && !Strings.isNullOrEmpty(endDate)) {
dateRange = new SolrSearchServiceImpl.SolrExplicitDateRange("explicit date range", startDate,
endDate);
}
return dateRange;
}
/**
* subject is a mobile-only parameter, while subjects is a desktop-only parameter
*
* @param subject mobile subject area value
* @param subjects desktop list of subject area values
     * @return subjects if subject is absent and subjects is non-empty; otherwise a singleton list
     *     of subject (or an empty list if subject is also absent)
*/
private List<String> parseSubjects(String subject, List<String> subjects) {
if (Strings.isNullOrEmpty(subject) && subjects != null && subjects.size() > 0) {
return subjects;
} else {
return subject != null ? Collections.singletonList(subject) : new ArrayList<String>();
}
}
private ArticleSearchQuery.Builder fill(ArticleSearchQuery.Builder builder) {
return builder
.setJournalKeys(journalKeys)
.setArticleTypes(articleTypes)
.setSubjects(subjectList)
.setAuthors(authors)
.setSections(sections)
.setStart(start)
.setRows(resultsPerPage)
.setSortOrder(sortOrder)
.setDateRange(dateRange)
.setStartDate(startDate)
.setEndDate(endDate);
}
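    // Maps each filter type's map key to the CommonParams field that holds the values selected
    // for that filter, so setActiveAndInactiveFilterItems can look the values up generically.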
private static final ImmutableMap<String, Function<CommonParams, List<String>>> FILTER_KEYS_TO_FIELDS =
ImmutableMap.<String, Function<CommonParams, List<String>>>builder()
.put(JournalFilterType.JOURNAL_FILTER_MAP_KEY, params -> params.journalKeys)
.put(SingletonSearchFilterType.ARTICLE_TYPE.getFilterMapKey(), params -> params.articleTypes)
.put(SingletonSearchFilterType.SUBJECT_AREA.getFilterMapKey(), params -> params.subjectList)
.put(SingletonSearchFilterType.AUTHOR.getFilterMapKey(), params -> params.authors)
.put(SingletonSearchFilterType.SECTION.getFilterMapKey(), params -> params.sections)
.build();
/**
* Examine incoming URL parameters to see which filter items are active. CommonParams contains
* journalKeys, articleTypes, subjectList, authors, and sections parsed from request params.
* Check each string in these lists against their applicable filters.
*
* @param filter the search filter to examine
*/
public void setActiveAndInactiveFilterItems(SearchFilter filter) {
String filterMapKey = filter.getFilterTypeMapKey();
Function<CommonParams, List<String>> getter = FILTER_KEYS_TO_FIELDS.get(filterMapKey);
if (getter == null) {
throw new RuntimeException("Search Filter not configured with sane map key: " + filterMapKey);
}
filter.setActiveAndInactiveFilterItems(getter.apply(this));
}
/**
     * Creates an instance of {@link SearchFilterItem} for each active filter, using the URL parameters
*
* @param activeFilterItems set of active filter items
* @param parameterMap request's query parameter
* @param filterName name of the filter
* @param filterValues values of the filter
*/
private void buildActiveFilterItems(Set<SearchFilterItem> activeFilterItems, Map<String,
String[]> parameterMap, String filterName, String[] filterValues) {
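      // For each active value, build the query parameters with that single value removed
      // (other values for the same filter are kept); presumably the template uses this map
      // to render a link that clears just this filter item.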
for (String filterValue : filterValues) {
List<String> filterValueList = new ArrayList<>(Arrays.asList(filterValues));
Map<String, List<String>> queryParamMap = new HashMap<>();
      // convert Map<String, String[]> to Map<String, List<String>> for code re-usability
queryParamMap.putAll(parameterMap.entrySet().stream().collect(Collectors.toMap(entry -> entry
.getKey(), entry -> new ArrayList<>(Arrays.asList(entry.getValue())))));
queryParamMap.remove(filterName);
// include the rest of filter values for that specific filter
if (filterValueList.size() > 1) {
filterValueList.remove(filterValue);
queryParamMap.put(filterName, filterValueList);
}
String displayName;
if (filterName.equals("filterJournals")) {
displayName = siteSet.getJournalNameFromKey(filterValue);
} else {
displayName = filterValue;
}
SearchFilterItem filterItem = new SearchFilterItem(displayName, 0,
filterName, filterValue, queryParamMap);
activeFilterItems.add(filterItem);
}
}
/**
     * Examines the incoming URL parameters when there are no search results and sets the active filters
*
* @return set of active filters
*/
public Set<SearchFilterItem> setActiveFilterParams(Model model, HttpServletRequest request) {
Map<String, String[]> parameterMap = request.getParameterMap();
model.addAttribute("parameterMap", parameterMap);
// exclude non-filter query parameters
Map<String, String[]> filtersOnlyMap = parameterMap.entrySet().stream()
.filter(entry -> FILTER_PARAMETER_NAMES.contains(entry.getKey())
|| ("filterJournals").equals(entry.getKey()))
.collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
Set<SearchFilterItem> activeFilterItems = new LinkedHashSet<>();
filtersOnlyMap.forEach((filterName, filterValues) -> buildActiveFilterItems(activeFilterItems,
parameterMap, filterName, filterValues));
return activeFilterItems;
}
/**
* @param query the incoming query string
* @return True if the query string does not contain any advanced search terms,
* listed in {@link AdvancedSearchTerms}
*/
private boolean isSimpleSearch(String query) {
return Arrays.stream(AdvancedSearchTerms.values()).noneMatch(e -> query.contains(e.text));
}
}
/**
   * Examine the current {@code ArticleSearchQuery} object and build a single URL parameter
* string to append to the current search URL.
*
* @param q the search query to rebuild search URL parameters from
* @return ImmutableListMultimap that contains the URL parameter list
*/
private static ImmutableListMultimap<String, String> rebuildUrlParameters(ArticleSearchQuery q) {
Preconditions.checkArgument(!q.isForRawResults());
Preconditions.checkArgument(!q.getFacet().isPresent());
ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
builder.put(q.isSimple() ? "q" : "unformattedQuery", q.getQuery().or(""));
int rows = q.getRows();
builder.put("resultsPerPage", Integer.toString(rows));
if (rows > 0) {
int page = q.getStart() / rows + 1;
builder.put("page", Integer.toString(page));
}
builder.putAll("filterJournals", q.getJournalKeys());
builder.putAll("filterSubjects", q.getSubjects());
builder.putAll("filterAuthors", q.getAuthors());
builder.putAll("filterSections", q.getSections());
builder.putAll("filterArticleTypes", q.getArticleTypes());
builder.putAll("filterStartDate", q.getStartDate() == null ? "" : q.getStartDate());
builder.putAll("filterEndDate", q.getEndDate() == null ? "" : q.getEndDate());
// TODO: Support dateRange. Note this is different from startDate and endDate
// TODO: Support sortOrder
for (Map.Entry<String, String> entry : q.getRawParameters().entrySet()) {
builder.put(entry);
}
return builder.build();
}
private CommonParams modelCommonParams(HttpServletRequest request, Model model,
@SiteParam Site site, @RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = new CommonParams(siteSet, site);
commonParams.parseParams(params);
commonParams.addToModel(model, request);
addOptionsToModel(model);
return commonParams;
}
/**
* Performs a simple search and serves the result as XML to be read by an RSS reader
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params search parameters identical to the {@code search} method
* @return RSS view of articles returned by the search
* @throws IOException
*/
@RequestMapping(name = "searchFeed", value = "/search/feed/{feedType:atom|rss}",
params = {"q", "!volume", "!subject"}, method = RequestMethod.GET)
public ModelAndView getSearchRssFeedView(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String feedType, @RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
String queryString = params.getFirst("q");
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery(queryString)
.setSimple(commonParams.isSimpleSearch(queryString))
.setIsRssSearch(true);
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<String, ?> searchResults = solrSearchService.search(queryObj);
String feedTitle = representQueryParametersAsString(params);
return getFeedModelAndView(site, feedType, feedTitle, searchResults);
}
/**
* Performs an advanced search and serves the result as XML to be read by an RSS reader
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params search parameters identical to the {@code search} method
* @return RSS view of articles returned by the search
* @throws IOException
*/
@RequestMapping(name = "advancedSearchFeed", value = "/search/feed/{feedType:atom|rss}",
params = {"unformattedQuery", "!volume"}, method = RequestMethod.GET)
public ModelAndView getAdvancedSearchRssFeedView(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String feedType, @RequestParam MultiValueMap<String, String> params) throws IOException {
String queryString = params.getFirst("unformattedQuery");
params.remove("unformattedQuery");
params.add("q", queryString);
return getSearchRssFeedView(request, model, site, feedType, params);
}
private static String representQueryParametersAsString(MultiValueMap<String, String> params) {
UrlParamBuilder builder = UrlParamBuilder.params();
for (Map.Entry<String, List<String>> entry : params.entrySet()) {
String key = entry.getKey();
for (String value : entry.getValue()) {
builder.add(key, value);
}
}
return builder.toString();
}
private ModelAndView getFeedModelAndView(Site site, String feedType, String title, Map<String, ?> searchResults) {
ModelAndView mav = new ModelAndView();
FeedMetadataField.SITE.putInto(mav, site);
FeedMetadataField.FEED_INPUT.putInto(mav, searchResults.get("docs"));
FeedMetadataField.TITLE.putInto(mav, title);
mav.setView(FeedType.getView(articleFeedView, feedType));
return mav;
}
// Unless the "!volume" part is included in the params in the next few methods, you will
// get an "ambiguous handler method" exception from spring. I think this is because all
// of these methods (including volumeSearch) use a MultiValueMap for @RequestParam, instead
// of individually listing the params.
/**
* Performs a "simple" or "advanced" search. The query parameter is read, and if advanced search
* terms are found, an advanced search is performed. Otherwise, a simple search is performed. The
* only difference between simple and advanced searches is the use of dismax in the ultimate
* Solr query.
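   * <p>
   * For illustration (the journal key below is hypothetical), a request such as
   * {@code /search?q=circadian+rhythms&filterJournals=PLoSONE&page=2&resultsPerPage=15} is handled here.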
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "simpleSearch", value = "/search", params = {"q", "!volume", "!subject"})
public String search(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
String queryString = params.getFirst("q");
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery(queryString)
.setSimple(commonParams.isSimpleSearch(queryString));
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<?, ?> searchResults;
try {
searchResults = solrSearchService.search(queryObj);
} catch (ServiceRequestException sre) {
return handleFailedSolrRequest(model, site, queryString, sre);
}
model.addAttribute("searchResults", solrSearchService.addArticleLinks(searchResults, request, site, siteSet));
Set<SearchFilterItem> activeFilterItems;
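    // numFound is deserialized as a Double. With zero hits there are no filter counts to draw
    // on, so the active filters are reconstructed from the request parameters instead.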
if ((Double) searchResults.get("numFound") == 0.0) {
activeFilterItems = commonParams.setActiveFilterParams(model, request);
} else {
Map<String, SearchFilter> filters = searchFilterService.getSearchFilters(queryObj, rebuildUrlParameters(queryObj));
filters.values().forEach(commonParams::setActiveAndInactiveFilterItems);
activeFilterItems = new LinkedHashSet<>();
filters.values().forEach(filter -> activeFilterItems.addAll(filter.getActiveFilterItems()));
model.addAttribute("searchFilters", filters);
}
model.addAttribute("activeFilterItems", activeFilterItems);
return site.getKey() + "/ftl/search/searchResults";
}
private String handleFailedSolrRequest(Model model, Site site, String queryString,
ServiceRequestException sre) throws IOException {
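    // Distinguish a query the user simply mistyped (Solr syntax error) from an unexpected Solr
    // failure, then fall back to rendering the blank advanced search form in either case.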
if (sre.getResponseBody().contains("SyntaxError: Cannot parse")) {
log.info("User attempted invalid search: " + queryString + "\n Exception: " + sre.getMessage());
model.addAttribute("cannotParseQueryError", true);
} else {
log.error("Unknown error returned from Solr: " + sre.getMessage());
model.addAttribute("unknownQueryError", true);
}
return newAdvancedSearch(model, site);
}
/**
* This is a catch for advanced searches originating from Old Ambra. It transforms the
* "unformattedQuery" param into "q" which is used by Wombat's new search.
* todo: remove this method once Old Ambra advanced search is destroyed
*/
@RequestMapping(name = "advancedSearch", value = "/search", params = {"unformattedQuery", "!volume", "!subject"})
public String advancedSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
String queryString = params.getFirst("unformattedQuery");
params.remove("unformattedQuery");
params.add("q", queryString);
return search(request, model, site, params);
}
@RequestMapping(name = "newAdvancedSearch", value = "/search", params = {"!unformattedQuery", "!volume", "!subject"})
public String newAdvancedSearch(Model model, @SiteParam Site site) throws IOException {
model.addAttribute("isNewSearch", true);
model.addAttribute("otherQuery", "");
model.addAttribute("activeFilterItems", new HashSet<>());
return site.getKey() + "/ftl/search/searchResults";
}
/**
* Uses {@link #search(HttpServletRequest, Model, Site, MultiValueMap)} to support the mobile taxonomy
* browser
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "subjectSearch", value = "/search", params = {"subject", "!volume"})
public String subjectSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
params.add("q", "");
return search(request, model, site, params);
}
@RequestMapping(name = "browse", value = "/browse", params = "!filterSubjects")
public String browse(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
subjectAreaSearch(request, model, site, params, "");
return site.getKey() + "/ftl/browseSubjectArea";
}
@RequestMapping(name = "browseSubjectArea", value = "/browse/{subject}", params = "!filterSubjects")
public String browseSubjectArea(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String subject, @RequestParam MultiValueMap<String, String> params)
throws IOException {
subjectAreaSearch(request, model, site, params, subject);
return site.getKey() + "/ftl/browseSubjectArea";
}
// Requests coming from the advanced search form with URLs beginning with "/search/quick/" will always
// have the parameters id, eLocationId, and volume, although only one will be populated. The expressions
// like "id!=" in the following request mappings cause spring to map to controller methods only if
// the corresponding parameters are present, and do not have a value of the empty string.
/**
* Searches for an article having the given doi (the value of the id parameter). If the DOI exists for any journal,
* this method will redirect to the article. Otherwise, an empty search results page will be rendered.
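   * <p>
   * Example with an illustrative (not real) DOI: {@code /search?id=10.1371/journal.pone.1234567}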
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param doi identifies the article
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "doiSearch", value = "/search", params = {"id!="})
public String doiSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam(value = "id", required = true) String doi) throws IOException {
Map<?, ?> searchResults = solrSearchService.lookupArticleByDoi(doi);
return renderSingleResult(searchResults, "doi:" + doi, request, model, site);
}
/**
* Searches for an article having the given eLocationId from the given journal. Note that eLocationIds are only
* unique within journals, so both parameters are necessary. If the article is found, this method will redirect to
* it; otherwise an empty search results page will be rendered.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param eLocationId identifies the article in a journal
* @param journal journal to search within
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "eLocationSearch", value = "/search", params = {"eLocationId!="})
public String eLocationSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam(value = "eLocationId", required = true) String eLocationId,
@RequestParam(value = "filterJournals", required = true) String journal) throws IOException {
Map<?, ?> searchResults = solrSearchService.lookupArticleByELocationId(eLocationId, journal);
return renderSingleResult(searchResults, "elocation_id:" + eLocationId, request, model, site);
}
/**
* Searches for all articles in the volume identified by the value of the volume parameter.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "volumeSearch", value = "/search", params = {"volume!="})
public String volumeSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
int volume;
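    // A non-numeric volume parameter renders an empty results page rather than erroring out.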
try {
volume = Integer.parseInt(params.getFirst("volume"));
} catch (NumberFormatException nfe) {
return renderEmptyResults(null, "volume:" + params.getFirst("volume"), model, site);
}
ArticleSearchQuery query = commonParams.fill(ArticleSearchQuery.builder()).build();
Map<?, ?> searchResults = solrSearchService.searchVolume(query, volume);
model.addAttribute("searchResults", solrSearchService.addArticleLinks(searchResults, request, site, siteSet));
model.addAttribute("otherQuery", String.format("volume:%d", volume));
Map<String, SearchFilter> filters = searchFilterService.getVolumeSearchFilters(volume,
commonParams.journalKeys, commonParams.articleTypes, commonParams.dateRange);
filters.values().forEach(commonParams::setActiveAndInactiveFilterItems);
Set<SearchFilterItem> activeFilterItems = new HashSet<>();
filters.values().forEach(filter -> activeFilterItems.addAll(filter.getActiveFilterItems()));
model.addAttribute("searchFilters", filters);
model.addAttribute("activeFilterItems", activeFilterItems);
return site.getKey() + "/ftl/search/searchResults";
}
/**
* Renders either an article page, or an empty search results page.
*
* @param searchResults deserialized JSON that should be either empty, or contain a single article
* @param searchTerm the search term (suitable for display) that was input
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @return String indicating template location
* @throws IOException
*/
private String renderSingleResult(Map<?, ?> searchResults, String searchTerm, HttpServletRequest request, Model model,
Site site) throws IOException {
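    // numFound arrives as a Double in the deserialized Solr response; narrow it to an int.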
int numFound = ((Double) searchResults.get("numFound")).intValue();
if (numFound > 1) {
throw new IllegalStateException("Valid DOIs should return exactly one article");
}
if (numFound == 1) {
searchResults = solrSearchService.addArticleLinks(searchResults, request, site, siteSet);
List docs = (List) searchResults.get("docs");
Map doc = (Map) docs.get(0);
return "redirect:" + doc.get("link");
} else {
return renderEmptyResults(searchResults, searchTerm, model, site);
}
}
/**
* Renders an empty search results page.
*
* @param searchResults empty search results. If null, one will be constructed.
* @param searchTerm the search term (suitable for display) that was input
* @param model model that will be passed to the template
* @param site site the request originates from
* @return String indicating template location
*/
private String renderEmptyResults(Map searchResults, String searchTerm, Model model, Site site) {
if (searchResults == null) {
searchResults = new HashMap<>();
searchResults.put("numFound", 0);
}
model.addAttribute("searchResults", searchResults);
model.addAttribute("otherQuery", searchTerm);
addOptionsToModel(model);
// Add minimum model attributes necessary to render the form.
model.addAttribute("selectedSortOrder", SolrSearchServiceImpl.SolrSortOrder.RELEVANCE);
model.addAttribute("selectedDateRange", SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME);
model.addAttribute("isFiltered", false);
model.addAttribute("resultsPerPage", 15);
return site.getKey() + "/ftl/search/searchResults";
}
private void addOptionsToModel(Model model) {
model.addAttribute("sortOrders", SolrSearchServiceImpl.SolrSortOrder.values());
model.addAttribute("dateRanges", SolrSearchServiceImpl.SolrEnumeratedDateRange.values());
}
/**
   * Sets defaults and performs the search for the subject area landing page
*
* @param request HTTP request for browsing subject areas
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params HTTP request params
   * @param subject the subject area to search; all articles are returned if no subject area is provided
* @throws IOException
*/
private void subjectAreaSearch(HttpServletRequest request, Model model, Site site,
MultiValueMap<String, String> params, String subject) throws IOException {
modelSubjectHierarchy(model, site, subject);
String subjectName;
if (Strings.isNullOrEmpty(subject)) {
params.add("subject", "");
subjectName = "All Subject Areas";
} else {
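      // The subject arrives as a URL slug with underscores; convert it back to a displayable name.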
subject = subject.replace("_", " ");
params.add("subject", subject);
subjectName = WordUtils.capitalize(subject);
}
model.addAttribute("subjectName", subjectName);
// set defaults for subject area landing page
if (ListUtil.isNullOrEmpty(params.get("resultsPerPage"))) {
params.add("resultsPerPage", BROWSE_RESULTS_PER_PAGE);
}
if (ListUtil.isNullOrEmpty(params.get("sortOrder"))) {
params.add("sortOrder", "DATE_NEWEST_FIRST");
}
if (ListUtil.isNullOrEmpty(params.get("filterJournals"))) {
params.add("filterJournals", site.getJournalKey());
}
CommonParams commonParams = new CommonParams(siteSet, site);
commonParams.parseParams(params);
commonParams.addToModel(model, request);
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery("")
.setSimple(false);
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<String, ?> searchResults = solrSearchService.search(queryObj);
model.addAttribute("articles", SolrArticleAdapter.unpackSolrQuery(searchResults));
model.addAttribute("searchResults", searchResults);
model.addAttribute("page", commonParams.getSingleParam(params, "page", "1"));
model.addAttribute("journalKey", site.getKey());
}
private void modelSubjectHierarchy(Model model, Site site, String subject) throws IOException {
TaxonomyGraph fullTaxonomyView = browseTaxonomyService.parseCategories(site.getJournalKey());
Set<String> subjectParents;
Set<String> subjectChildren;
if (subject != null && subject.length() > 0) {
//Recreate the category name as stored in the DB
subject = subject.replace("_", " ");
TaxonomyGraph.CategoryView categoryView = fullTaxonomyView.getView(subject);
if (categoryView == null) {
throw new NotFoundException(String.format("category %s does not exist.", subject));
} else {
if (categoryView.getParents().isEmpty()) {
subjectParents = new HashSet<>();
} else {
subjectParents = categoryView.getParents().keySet();
}
subjectChildren = categoryView.getChildren().keySet();
}
} else {
subjectParents = new HashSet<>();
subjectChildren = fullTaxonomyView.getRootCategoryNames();
}
model.addAttribute("subjectParents", subjectParents);
model.addAttribute("subjectChildren", subjectChildren);
}
}
| src/main/java/org/ambraproject/wombat/controller/SearchController.java | /*
* $HeadURL$
* $Id$
* Copyright (c) 2006-2013 by Public Library of Science http://plos.org http://ambraproject.org
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ambraproject.wombat.controller;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import org.ambraproject.wombat.config.site.Site;
import org.ambraproject.wombat.config.site.SiteParam;
import org.ambraproject.wombat.config.site.SiteSet;
import org.ambraproject.wombat.feed.ArticleFeedView;
import org.ambraproject.wombat.feed.FeedMetadataField;
import org.ambraproject.wombat.feed.FeedType;
import org.ambraproject.wombat.model.JournalFilterType;
import org.ambraproject.wombat.model.SearchFilter;
import org.ambraproject.wombat.model.SearchFilterItem;
import org.ambraproject.wombat.model.SingletonSearchFilterType;
import org.ambraproject.wombat.model.TaxonomyGraph;
import org.ambraproject.wombat.service.BrowseTaxonomyService;
import org.ambraproject.wombat.service.SolrArticleAdapter;
import org.ambraproject.wombat.service.remote.ArticleSearchQuery;
import org.ambraproject.wombat.service.remote.SearchFilterService;
import org.ambraproject.wombat.service.remote.ServiceRequestException;
import org.ambraproject.wombat.service.remote.SolrSearchService;
import org.ambraproject.wombat.service.remote.SolrSearchServiceImpl;
import org.ambraproject.wombat.util.ListUtil;
import org.ambraproject.wombat.util.UrlParamBuilder;
import org.apache.commons.lang.WordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Controller class for user-initiated searches.
*/
@Controller
public class SearchController extends WombatController {
private static final Logger log = LoggerFactory.getLogger(SearchController.class);
@Autowired
private SiteSet siteSet;
@Autowired
private SolrSearchService solrSearchService;
@Autowired
private SearchFilterService searchFilterService;
@Autowired
private BrowseTaxonomyService browseTaxonomyService;
@Autowired
private ArticleFeedView articleFeedView;
private final String BROWSE_RESULTS_PER_PAGE = "13";
/**
* Class that encapsulates the parameters that are shared across many different search types. For example, a subject
* search and an advanced search will have many parameters in common, such as sort order, date range, page, results
* per page, etc. This class eliminates the need to have long lists of @RequestParam parameters duplicated across
* many controller methods.
* <p>
* This class also contains logic having to do with which parameters take precedence over others, defaults when
* parameters are absent, and the like.
*/
@VisibleForTesting
static final class CommonParams {
private enum AdvancedSearchTerms {
EVERYTHING("everything:"),
TITLE("title:"),
AUTHOR("author:"),
BODY("body:"),
ABSTRACT("abstract:"),
SUBJECT("subject:"),
PUBLICATION_DATE("publication_date:"),
ACCEPTED_DATE("accepted_date:"),
ID("id:"),
ARTICLE_TYPE("article_type:"),
AUTHOR_AFFILIATE("author_affiliate:"),
COMPETING_INTEREST("competing_interest:"),
CONCLUSIONS("conclusions:"),
EDITOR("editor:"),
ELOCATION_ID("elocation_id:"),
FIGURE_TABLE_CAPTION("figure_table_caption:"),
FINANCIAL_DISCLOSURE("financial_disclosure:"),
INTRODUCTION("introduction:"),
ISSUE("issue:"),
MATERIALS_AND_METHODS("materials_and_methods:"),
RECEIVED_DATE("received_date:"),
REFERENCE("reference:"),
RESULTS_AND_DISCUSSION("results_and_discussion:"),
SUPPORTING_INFORMATION("supporting_information:"),
TRIAL_REGISTRATION("trial_registration:"),
VOLUME("volume:");
private final String text;
private AdvancedSearchTerms(final String text) {
this.text = text;
}
@Override
public String toString() {
return text;
}
}
/**
* The number of the first desired result (zero-based) that will be passed to solr. Calculated from the page and
* resultsPerPage URL parameters.
*/
int start;
SolrSearchServiceImpl.SolrSortOrder sortOrder;
SolrSearchService.SearchCriterion dateRange;
List<String> articleTypes;
List<String> journalKeys;
@VisibleForTesting
Set<String> filterJournalNames;
@VisibleForTesting
List<String> subjectList;
List<String> authors;
List<String> sections;
/**
* Indicates whether any filter parameters are being applied to the search (journal, subject area, etc).
*/
@VisibleForTesting
boolean isFiltered;
private SiteSet siteSet;
private Site site;
private int resultsPerPage;
private String startDate;
private String endDate;
private final String DEFAULT_START_DATE = "2003-01-01";
// doesn't include journal and date filter param names
static final Set<String> FILTER_PARAMETER_NAMES = Stream.of(SingletonSearchFilterType.values()).map
(SingletonSearchFilterType::getParameterName).collect(Collectors.toSet());
/**
* Constructor.
*
* @param siteSet siteSet associated with the request
* @param site site of the request
*/
CommonParams(SiteSet siteSet, Site site) {
this.siteSet = siteSet;
this.site = site;
}
/**
* Extracts parameters from the raw parameter map, and performs some logic related to what parameters take
* precedence and default values when ones aren't present.
*
* @param params
* @throws IOException
*/
void parseParams(Map<String, List<String>> params) throws IOException {
String pageParam = getSingleParam(params, "page", null);
resultsPerPage = Integer.parseInt(getSingleParam(params, "resultsPerPage", "15"));
if (pageParam != null) {
int page = Integer.parseInt(pageParam);
start = (page - 1) * resultsPerPage;
}
sortOrder = SolrSearchServiceImpl.SolrSortOrder.RELEVANCE;
String sortOrderParam = getSingleParam(params, "sortOrder", null);
if (!Strings.isNullOrEmpty(sortOrderParam)) {
sortOrder = SolrSearchServiceImpl.SolrSortOrder.valueOf(sortOrderParam);
}
dateRange = parseDateRange(getSingleParam(params, "dateRange", null),
getSingleParam(params, "filterStartDate", null), getSingleParam(params, "filterEndDate", null));
journalKeys = ListUtil.isNullOrEmpty(params.get("filterJournals"))
? new ArrayList<String>() : params.get("filterJournals");
filterJournalNames = new HashSet<>();
for (String journalKey : journalKeys) {
filterJournalNames.add(siteSet.getJournalNameFromKey(journalKey));
}
startDate = getSingleParam(params, "filterStartDate", null);
endDate = getSingleParam(params, "filterEndDate", null);
if (startDate == null && endDate != null) {
startDate = DEFAULT_START_DATE;
} else if (startDate != null && endDate == null) {
endDate = new SimpleDateFormat("yyyy-MM-dd").format(Calendar.getInstance().getTime());
}
subjectList = parseSubjects(getSingleParam(params, "subject", null), params.get("filterSubjects"));
articleTypes = params.get("filterArticleTypes");
articleTypes = articleTypes == null ? new ArrayList<String>() : articleTypes;
authors = ListUtil.isNullOrEmpty(params.get("filterAuthors"))
? new ArrayList<String>() : params.get("filterAuthors");
sections = ListUtil.isNullOrEmpty(params.get("filterSections"))
? new ArrayList<String>() : params.get("filterSections");
isFiltered = !filterJournalNames.isEmpty() || !subjectList.isEmpty() || !articleTypes.isEmpty()
|| dateRange != SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME || !authors.isEmpty()
|| startDate != null || endDate != null || !sections.isEmpty();
}
/**
* Adds parameters (and derived values) back to the model needed for results page rendering. This only adds model
* attributes that are shared amongst different types of searches; it is the caller's responsibility to add the
* search results and any other data needed.
*
* @param model model that will be passed to the template
* @param request HttpServletRequest
*/
void addToModel(Model model, HttpServletRequest request) {
model.addAttribute("resultsPerPage", resultsPerPage);
model.addAttribute("filterJournalNames", filterJournalNames);
// TODO: split or share model assignments between mobile and desktop.
model.addAttribute("filterJournals", journalKeys);
model.addAttribute("filterStartDate", startDate);
model.addAttribute("filterEndDate", endDate);
model.addAttribute("filterSubjects", subjectList);
model.addAttribute("filterArticleTypes", articleTypes);
model.addAttribute("filterAuthors", authors);
model.addAttribute("filterSections", sections);
// TODO: bind sticky form params using Spring MVC support for Freemarker. I think we have to add
// some more dependencies to do this. See
// http://static.springsource.org/spring/docs/3.0.x/spring-framework-reference/html/view.html#view-velocity
model.addAttribute("selectedSortOrder", sortOrder);
model.addAttribute("selectedDateRange", dateRange);
model.addAttribute("selectedResultsPerPage", resultsPerPage);
model.addAttribute("isFiltered", isFiltered);
// We pass in the request parameters here, because they are needed by paging.ftl.
// The normal way to get request parameters from a freemarker template is to use the
// RequestParameters variable, but due to a bug in freemarker, this does not handle
// multi-valued parameters correctly. See http://sourceforge.net/p/freemarker/bugs/324/
Map<String, String[]> parameterMap = request.getParameterMap();
model.addAttribute("parameterMap", parameterMap);
Map<String, String[]> clearDateFilterParams = new HashMap<>();
clearDateFilterParams.putAll(parameterMap);
clearDateFilterParams.remove("filterStartDate");
clearDateFilterParams.remove("filterEndDate");
model.addAttribute("dateClearParams", clearDateFilterParams);
Map<String, String[]> clearAllFilterParams = new HashMap<>();
clearAllFilterParams.putAll(clearDateFilterParams);
clearAllFilterParams.remove("filterJournals");
clearAllFilterParams.remove("filterSubjects");
clearAllFilterParams.remove("filterAuthors");
clearAllFilterParams.remove("filterSections");
clearAllFilterParams.remove("filterArticleTypes");
model.addAttribute("clearAllFilterParams", clearAllFilterParams);
}
private String getSingleParam(Map<String, List<String>> params, String key, String defaultValue) {
List<String> values = params.get(key);
return values == null || values.isEmpty() ? defaultValue
: values.get(0) == null || values.get(0).isEmpty() ? defaultValue : values.get(0);
}
/**
* Determines which publication dates to filter by in the search. If no dates are input, a default date range of All
* Time will be used. Mobile search only provides the enumerated dateRangeParam field, while desktop search provides
* explicit fields for start and end dates. The parameters are mutually exclusive.
*
* @param dateRangeParam mobile date range enumeration value
* @param startDate desktop start date value
* @param endDate desktop end date value
     * @return a generic {@link SolrSearchService.SearchCriterion} object used to query Solr
*/
private SolrSearchService.SearchCriterion parseDateRange(String dateRangeParam, String startDate, String endDate) {
SolrSearchService.SearchCriterion dateRange = SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME;
if (!Strings.isNullOrEmpty(dateRangeParam)) {
dateRange = SolrSearchServiceImpl.SolrEnumeratedDateRange.valueOf(dateRangeParam);
} else if (!Strings.isNullOrEmpty(startDate) && !Strings.isNullOrEmpty(endDate)) {
dateRange = new SolrSearchServiceImpl.SolrExplicitDateRange("explicit date range", startDate,
endDate);
}
return dateRange;
}
/**
* subject is a mobile-only parameter, while subjects is a desktop-only parameter
*
* @param subject mobile subject area value
* @param subjects desktop list of subject area values
     * @return subjects if subject is absent and subjects is non-empty; otherwise a singleton list
     *     of subject (or an empty list if subject is also absent)
*/
private List<String> parseSubjects(String subject, List<String> subjects) {
if (Strings.isNullOrEmpty(subject) && subjects != null && subjects.size() > 0) {
return subjects;
} else {
return subject != null ? Collections.singletonList(subject) : new ArrayList<String>();
}
}
private ArticleSearchQuery.Builder fill(ArticleSearchQuery.Builder builder) {
return builder
.setJournalKeys(journalKeys)
.setArticleTypes(articleTypes)
.setSubjects(subjectList)
.setAuthors(authors)
.setSections(sections)
.setStart(start)
.setRows(resultsPerPage)
.setSortOrder(sortOrder)
.setDateRange(dateRange)
.setStartDate(startDate)
.setEndDate(endDate);
}
private static final ImmutableMap<String, Function<CommonParams, List<String>>> FILTER_KEYS_TO_FIELDS =
ImmutableMap.<String, Function<CommonParams, List<String>>>builder()
.put(JournalFilterType.JOURNAL_FILTER_MAP_KEY, params -> params.journalKeys)
.put(SingletonSearchFilterType.ARTICLE_TYPE.getFilterMapKey(), params -> params.articleTypes)
.put(SingletonSearchFilterType.SUBJECT_AREA.getFilterMapKey(), params -> params.subjectList)
.put(SingletonSearchFilterType.AUTHOR.getFilterMapKey(), params -> params.authors)
.put(SingletonSearchFilterType.SECTION.getFilterMapKey(), params -> params.sections)
.build();
/**
* Examine incoming URL parameters to see which filter items are active. CommonParams contains
* journalKeys, articleTypes, subjectList, authors, and sections parsed from request params.
* Check each string in these lists against their applicable filters.
*
* @param filter the search filter to examine
*/
public void setActiveAndInactiveFilterItems(SearchFilter filter) {
String filterMapKey = filter.getFilterTypeMapKey();
Function<CommonParams, List<String>> getter = FILTER_KEYS_TO_FIELDS.get(filterMapKey);
if (getter == null) {
throw new RuntimeException("Search Filter not configured with sane map key: " + filterMapKey);
}
filter.setActiveAndInactiveFilterItems(getter.apply(this));
}
/**
     * Creates an instance of {@link SearchFilterItem} for each active filter, using the URL parameters
*
* @param activeFilterItems set of active filter items
* @param parameterMap request's query parameter
* @param filterName name of the filter
* @param filterValues values of the filter
*/
private void buildActiveFilterItems(Set<SearchFilterItem> activeFilterItems, Map<String,
String[]> parameterMap, String filterName, String[] filterValues) {
for (String filterValue : filterValues) {
List<String> filterValueList = new ArrayList<>(Arrays.asList(filterValues));
Map<String, List<String>> queryParamMap = new HashMap<>();
      // convert Map<String, String[]> to Map<String, List<String>> for code re-usability
queryParamMap.putAll(parameterMap.entrySet().stream().collect(Collectors.toMap(entry -> entry
.getKey(), entry -> new ArrayList<>(Arrays.asList(entry.getValue())))));
queryParamMap.remove(filterName);
// include the rest of filter values for that specific filter
if (filterValueList.size() > 1) {
filterValueList.remove(filterValue);
queryParamMap.put(filterName, filterValueList);
}
String displayName;
if (filterName.equals("filterJournals")) {
displayName = siteSet.getJournalNameFromKey(filterValue);
} else {
displayName = filterValue;
}
SearchFilterItem filterItem = new SearchFilterItem(displayName, 0,
filterName, filterValue, queryParamMap);
activeFilterItems.add(filterItem);
}
}
/**
     * Examines the incoming URL parameters when there are no search results and sets the active filters
*
* @return set of active filters
*/
public Set<SearchFilterItem> setActiveFilterParams(Model model, HttpServletRequest request) {
Map<String, String[]> parameterMap = request.getParameterMap();
model.addAttribute("parameterMap", parameterMap);
// exclude non-filter query parameters
Map<String, String[]> filtersOnlyMap = parameterMap.entrySet().stream()
.filter(entry -> FILTER_PARAMETER_NAMES.contains(entry.getKey())
|| ("filterJournals").equals(entry.getKey()))
.collect(Collectors.toMap(entry -> entry.getKey(), entry -> entry.getValue()));
Set<SearchFilterItem> activeFilterItems = new LinkedHashSet<>();
filtersOnlyMap.forEach((filterName, filterValues) -> buildActiveFilterItems(activeFilterItems,
parameterMap, filterName, filterValues));
return activeFilterItems;
}
/**
* @param query the incoming query string
* @return True if the query string does not contain any advanced search terms,
* listed in {@link AdvancedSearchTerms}
*/
private boolean isSimpleSearch(String query) {
return Arrays.stream(AdvancedSearchTerms.values()).noneMatch(e -> query.contains(e.text));
}
}
/**
   * Examine the current {@code ArticleSearchQuery} object and build a single URL parameter
* string to append to the current search URL.
*
* @param q the search query to rebuild search URL parameters from
* @return ImmutableListMultimap that contains the URL parameter list
*/
private static ImmutableListMultimap<String, String> rebuildUrlParameters(ArticleSearchQuery q) {
Preconditions.checkArgument(!q.isForRawResults());
Preconditions.checkArgument(!q.getFacet().isPresent());
ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder();
builder.put(q.isSimple() ? "q" : "unformattedQuery", q.getQuery().or(""));
int rows = q.getRows();
builder.put("resultsPerPage", Integer.toString(rows));
if (rows > 0) {
int page = q.getStart() / rows + 1;
builder.put("page", Integer.toString(page));
}
builder.putAll("filterJournals", q.getJournalKeys());
builder.putAll("filterSubjects", q.getSubjects());
builder.putAll("filterAuthors", q.getAuthors());
builder.putAll("filterSections", q.getSections());
builder.putAll("filterArticleTypes", q.getArticleTypes());
builder.putAll("filterStartDate", q.getStartDate() == null ? "" : q.getStartDate());
builder.putAll("filterEndDate", q.getEndDate() == null ? "" : q.getEndDate());
// TODO: Support dateRange. Note this is different from startDate and endDate
// TODO: Support sortOrder
for (Map.Entry<String, String> entry : q.getRawParameters().entrySet()) {
builder.put(entry);
}
return builder.build();
}
private CommonParams modelCommonParams(HttpServletRequest request, Model model,
@SiteParam Site site, @RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = new CommonParams(siteSet, site);
commonParams.parseParams(params);
commonParams.addToModel(model, request);
addOptionsToModel(model);
return commonParams;
}
/**
* Performs a simple search and serves the result as XML to be read by an RSS reader
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params search parameters identical to the {@code search} method
* @return RSS view of articles returned by the search
* @throws IOException
*/
@RequestMapping(name = "searchFeed", value = "/search/feed/{feedType:atom|rss}",
params = {"q", "!volume", "!subject"}, method = RequestMethod.GET)
public ModelAndView getSearchRssFeedView(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String feedType, @RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
String queryString = params.getFirst("q");
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery(queryString)
.setSimple(commonParams.isSimpleSearch(queryString))
.setIsRssSearch(true);
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<String, ?> searchResults = solrSearchService.search(queryObj);
String feedTitle = representQueryParametersAsString(params);
return getFeedModelAndView(site, feedType, feedTitle, searchResults);
}
/**
* Performs an advanced search and serves the result as XML to be read by an RSS reader
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params search parameters identical to the {@code search} method
* @return RSS view of articles returned by the search
* @throws IOException
*/
@RequestMapping(name = "advancedSearchFeed", value = "/search/feed/{feedType:atom|rss}",
params = {"unformattedQuery", "!volume"}, method = RequestMethod.GET)
public ModelAndView getAdvancedSearchRssFeedView(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String feedType, @RequestParam MultiValueMap<String, String> params) throws IOException {
String queryString = params.getFirst("unformattedQuery");
params.remove("unformattedQuery");
params.add("q", queryString);
return getSearchRssFeedView(request, model, site, feedType, params);
}
private static String representQueryParametersAsString(MultiValueMap<String, String> params) {
UrlParamBuilder builder = UrlParamBuilder.params();
for (Map.Entry<String, List<String>> entry : params.entrySet()) {
String key = entry.getKey();
for (String value : entry.getValue()) {
builder.add(key, value);
}
}
return builder.toString();
}
private ModelAndView getFeedModelAndView(Site site, String feedType, String title, Map<String, ?> searchResults) {
ModelAndView mav = new ModelAndView();
FeedMetadataField.SITE.putInto(mav, site);
FeedMetadataField.FEED_INPUT.putInto(mav, searchResults.get("docs"));
FeedMetadataField.TITLE.putInto(mav, title);
mav.setView(FeedType.getView(articleFeedView, feedType));
return mav;
}
// Unless the "!volume" part is included in the params in the next few methods, you will
// get an "ambiguous handler method" exception from spring. I think this is because all
// of these methods (including volumeSearch) use a MultiValueMap for @RequestParam, instead
// of individually listing the params.
/**
* Performs a "simple" or "advanced" search. The query parameter is read, and if advanced search
* terms are found, an advanced search is performed. Otherwise, a simple search is performed. The
* only difference between simple and advanced searches is the use of dismax in the ultimate
* Solr query.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "simpleSearch", value = "/search", params = {"q", "!volume", "!subject"})
public String search(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
String queryString = params.getFirst("q");
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery(queryString)
.setSimple(commonParams.isSimpleSearch(queryString));
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<?, ?> searchResults;
try {
searchResults = solrSearchService.search(queryObj);
} catch (ServiceRequestException sre) {
return handleFailedSolrRequest(model, site, queryString, sre);
}
model.addAttribute("searchResults", solrSearchService.addArticleLinks(searchResults, request, site, siteSet));
Set<SearchFilterItem> activeFilterItems;
if ((Double) searchResults.get("numFound") == 0.0) {
activeFilterItems = commonParams.setActiveFilterParams(model, request);
} else {
Map<String, SearchFilter> filters = searchFilterService.getSearchFilters(queryObj, rebuildUrlParameters(queryObj));
filters.values().forEach(commonParams::setActiveAndInactiveFilterItems);
activeFilterItems = new LinkedHashSet<>();
filters.values().forEach(filter -> activeFilterItems.addAll(filter.getActiveFilterItems()));
model.addAttribute("searchFilters", filters);
}
model.addAttribute("activeFilterItems", activeFilterItems);
return site.getKey() + "/ftl/search/searchResults";
}
private String handleFailedSolrRequest(Model model, Site site, String queryString,
ServiceRequestException sre) throws IOException {
if (sre.getResponseBody().contains("SyntaxError: Cannot parse")) {
log.info("User attempted invalid search: " + queryString + "\n Exception: " + sre.getMessage());
model.addAttribute("cannotParseQueryError", true);
} else {
log.error("Unknown error returned from Solr: " + sre.getMessage());
model.addAttribute("unknownQueryError", true);
}
return newAdvancedSearch(model, site);
}
/**
* This is a catch for advanced searches originating from Old Ambra. It transforms the
* "unformattedQuery" param into "q" which is used by Wombat's new search.
* todo: remove this method once Old Ambra advanced search is destroyed
*/
@RequestMapping(name = "advancedSearch", value = "/search", params = {"unformattedQuery", "!volume"})
public String advancedSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
String queryString = params.getFirst("unformattedQuery");
params.remove("unformattedQuery");
params.add("q", queryString);
return search(request, model, site, params);
}
@RequestMapping(name = "newAdvancedSearch", value = "/search", params = {"!unformattedQuery", "!volume", "!subject"})
public String newAdvancedSearch(Model model, @SiteParam Site site) throws IOException {
model.addAttribute("isNewSearch", true);
model.addAttribute("otherQuery", "");
model.addAttribute("activeFilterItems", new HashSet<>());
return site.getKey() + "/ftl/search/searchResults";
}
/**
* Uses {@link #search(HttpServletRequest, Model, Site, MultiValueMap)} to support the mobile taxonomy
* browser
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "subjectSearch", value = "/search", params = {"subject", "!volume"})
public String subjectSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
params.add("q", "");
return search(request, model, site, params);
}
@RequestMapping(name = "browse", value = "/browse", params = "!filterSubjects")
public String browse(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
subjectAreaSearch(request, model, site, params, "");
return site.getKey() + "/ftl/browseSubjectArea";
}
@RequestMapping(name = "browseSubjectArea", value = "/browse/{subject}", params = "!filterSubjects")
public String browseSubjectArea(HttpServletRequest request, Model model, @SiteParam Site site,
@PathVariable String subject, @RequestParam MultiValueMap<String, String> params)
throws IOException {
subjectAreaSearch(request, model, site, params, subject);
return site.getKey() + "/ftl/browseSubjectArea";
}
// Requests coming from the advanced search form with URLs beginning with "/search/quick/" will always
// have the parameters id, eLocationId, and volume, although only one will be populated. The expressions
// like "id!=" in the following request mappings cause spring to map to controller methods only if
// the corresponding parameters are present, and do not have a value of the empty string.
/**
* Searches for an article having the given doi (the value of the id parameter). If the DOI exists for any journal,
* this method will redirect to the article. Otherwise, an empty search results page will be rendered.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param doi identifies the article
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "doiSearch", value = "/search", params = {"id!="})
public String doiSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam(value = "id", required = true) String doi) throws IOException {
Map<?, ?> searchResults = solrSearchService.lookupArticleByDoi(doi);
return renderSingleResult(searchResults, "doi:" + doi, request, model, site);
}
/**
* Searches for an article having the given eLocationId from the given journal. Note that eLocationIds are only
* unique within journals, so both parameters are necessary. If the article is found, this method will redirect to
* it; otherwise an empty search results page will be rendered.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param eLocationId identifies the article in a journal
* @param journal journal to search within
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "eLocationSearch", value = "/search", params = {"eLocationId!="})
public String eLocationSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam(value = "eLocationId", required = true) String eLocationId,
@RequestParam(value = "filterJournals", required = true) String journal) throws IOException {
Map<?, ?> searchResults = solrSearchService.lookupArticleByELocationId(eLocationId, journal);
return renderSingleResult(searchResults, "elocation_id:" + eLocationId, request, model, site);
}
/**
* Searches for all articles in the volume identified by the value of the volume parameter.
*
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params all URL parameters
* @return String indicating template location
* @throws IOException
*/
@RequestMapping(name = "volumeSearch", value = "/search", params = {"volume!="})
public String volumeSearch(HttpServletRequest request, Model model, @SiteParam Site site,
@RequestParam MultiValueMap<String, String> params) throws IOException {
CommonParams commonParams = modelCommonParams(request, model, site, params);
int volume;
try {
volume = Integer.parseInt(params.getFirst("volume"));
} catch (NumberFormatException nfe) {
return renderEmptyResults(null, "volume:" + params.getFirst("volume"), model, site);
}
ArticleSearchQuery query = commonParams.fill(ArticleSearchQuery.builder()).build();
Map<?, ?> searchResults = solrSearchService.searchVolume(query, volume);
model.addAttribute("searchResults", solrSearchService.addArticleLinks(searchResults, request, site, siteSet));
model.addAttribute("otherQuery", String.format("volume:%d", volume));
Map<String, SearchFilter> filters = searchFilterService.getVolumeSearchFilters(volume,
commonParams.journalKeys, commonParams.articleTypes, commonParams.dateRange);
filters.values().forEach(commonParams::setActiveAndInactiveFilterItems);
Set<SearchFilterItem> activeFilterItems = new HashSet<>();
filters.values().forEach(filter -> activeFilterItems.addAll(filter.getActiveFilterItems()));
model.addAttribute("searchFilters", filters);
model.addAttribute("activeFilterItems", activeFilterItems);
return site.getKey() + "/ftl/search/searchResults";
}
/**
* Renders either an article page, or an empty search results page.
*
* @param searchResults deserialized JSON that should be either empty, or contain a single article
* @param searchTerm the search term (suitable for display) that was input
* @param request HttpServletRequest
* @param model model that will be passed to the template
* @param site site the request originates from
* @return String indicating template location
* @throws IOException
*/
private String renderSingleResult(Map<?, ?> searchResults, String searchTerm, HttpServletRequest request, Model model,
Site site) throws IOException {
int numFound = ((Double) searchResults.get("numFound")).intValue();
if (numFound > 1) {
throw new IllegalStateException("Valid DOIs should return exactly one article");
}
if (numFound == 1) {
searchResults = solrSearchService.addArticleLinks(searchResults, request, site, siteSet);
List docs = (List) searchResults.get("docs");
Map doc = (Map) docs.get(0);
return "redirect:" + doc.get("link");
} else {
return renderEmptyResults(searchResults, searchTerm, model, site);
}
}
/**
* Renders an empty search results page.
*
* @param searchResults empty search results. If null, one will be constructed.
* @param searchTerm the search term (suitable for display) that was input
* @param model model that will be passed to the template
* @param site site the request originates from
* @return String indicating template location
*/
private String renderEmptyResults(Map searchResults, String searchTerm, Model model, Site site) {
if (searchResults == null) {
searchResults = new HashMap<>();
searchResults.put("numFound", 0);
}
model.addAttribute("searchResults", searchResults);
model.addAttribute("otherQuery", searchTerm);
addOptionsToModel(model);
// Add minimum model attributes necessary to render the form.
model.addAttribute("selectedSortOrder", SolrSearchServiceImpl.SolrSortOrder.RELEVANCE);
model.addAttribute("selectedDateRange", SolrSearchServiceImpl.SolrEnumeratedDateRange.ALL_TIME);
model.addAttribute("isFiltered", false);
model.addAttribute("resultsPerPage", 15);
return site.getKey() + "/ftl/search/searchResults";
}
private void addOptionsToModel(Model model) {
model.addAttribute("sortOrders", SolrSearchServiceImpl.SolrSortOrder.values());
model.addAttribute("dateRanges", SolrSearchServiceImpl.SolrEnumeratedDateRange.values());
}
/**
* Set defaults and performs search for subject area landing page
*
* @param request HTTP request for browsing subject areas
* @param model model that will be passed to the template
* @param site site the request originates from
* @param params HTTP request params
* @param subject the subject area to be search; return all articles if no subject area is provided
* @throws IOException
*/
private void subjectAreaSearch(HttpServletRequest request, Model model, Site site,
MultiValueMap<String, String> params, String subject) throws IOException {
modelSubjectHierarchy(model, site, subject);
String subjectName;
if (Strings.isNullOrEmpty(subject)) {
params.add("subject", "");
subjectName = "All Subject Areas";
} else {
subject = subject.replace("_", " ");
params.add("subject", subject);
subjectName = WordUtils.capitalize(subject);
}
model.addAttribute("subjectName", subjectName);
// set defaults for subject area landing page
if (ListUtil.isNullOrEmpty(params.get("resultsPerPage"))) {
params.add("resultsPerPage", BROWSE_RESULTS_PER_PAGE);
}
if (ListUtil.isNullOrEmpty(params.get("sortOrder"))) {
params.add("sortOrder", "DATE_NEWEST_FIRST");
}
if (ListUtil.isNullOrEmpty(params.get("filterJournals"))) {
params.add("filterJournals", site.getJournalKey());
}
CommonParams commonParams = new CommonParams(siteSet, site);
commonParams.parseParams(params);
commonParams.addToModel(model, request);
ArticleSearchQuery.Builder query = ArticleSearchQuery.builder()
.setQuery("")
.setSimple(false);
commonParams.fill(query);
ArticleSearchQuery queryObj = query.build();
Map<String, ?> searchResults = solrSearchService.search(queryObj);
model.addAttribute("articles", SolrArticleAdapter.unpackSolrQuery(searchResults));
model.addAttribute("searchResults", searchResults);
model.addAttribute("page", commonParams.getSingleParam(params, "page", "1"));
model.addAttribute("journalKey", site.getKey());
}
private void modelSubjectHierarchy(Model model, Site site, String subject) throws IOException {
TaxonomyGraph fullTaxonomyView = browseTaxonomyService.parseCategories(site.getJournalKey());
Set<String> subjectParents;
Set<String> subjectChildren;
if (subject != null && subject.length() > 0) {
//Recreate the category name as stored in the DB
subject = subject.replace("_", " ");
TaxonomyGraph.CategoryView categoryView = fullTaxonomyView.getView(subject);
if (categoryView == null) {
throw new NotFoundException(String.format("category %s does not exist.", subject));
} else {
if (categoryView.getParents().isEmpty()) {
subjectParents = new HashSet<>();
} else {
subjectParents = categoryView.getParents().keySet();
}
subjectChildren = categoryView.getChildren().keySet();
}
} else {
subjectParents = new HashSet<>();
subjectChildren = fullTaxonomyView.getRootCategoryNames();
}
model.addAttribute("subjectParents", subjectParents);
model.addAttribute("subjectChildren", subjectChildren);
}
}
| DPRO-2277: fix browse search endpoint
(cherry picked from commit 1815ee21d978f7f318d8e17dcb7ae8bec7d455d6)
| src/main/java/org/ambraproject/wombat/controller/SearchController.java | DPRO-2277: fix browse search endpoint | <ide><path>rc/main/java/org/ambraproject/wombat/controller/SearchController.java
<ide> * "unformattedQuery" param into "q" which is used by Wombat's new search.
<ide> * todo: remove this method once Old Ambra advanced search is destroyed
<ide> */
<del> @RequestMapping(name = "advancedSearch", value = "/search", params = {"unformattedQuery", "!volume"})
<add> @RequestMapping(name = "advancedSearch", value = "/search", params = {"unformattedQuery", "!volume", "!subject"})
<ide> public String advancedSearch(HttpServletRequest request, Model model, @SiteParam Site site,
<ide> @RequestParam MultiValueMap<String, String> params) throws IOException {
<ide> String queryString = params.getFirst("unformattedQuery"); |
|
Java | apache-2.0 | 4c216e257c445d69f5b4a6a2a3962e01f5685afc | 0 | redisson/redisson,mrniko/redisson | /**
* Copyright (c) 2013-2019 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.codec;
import java.io.IOException;
import java.lang.reflect.Field;
import org.nustaq.serialization.FSTConfiguration;
import org.nustaq.serialization.FSTDecoder;
import org.nustaq.serialization.FSTEncoder;
import org.nustaq.serialization.FSTObjectInput;
import org.nustaq.serialization.FSTObjectOutput;
import org.nustaq.serialization.coders.FSTStreamDecoder;
import org.nustaq.serialization.coders.FSTStreamEncoder;
import org.redisson.client.codec.BaseCodec;
import org.redisson.client.handler.State;
import org.redisson.client.protocol.Decoder;
import org.redisson.client.protocol.Encoder;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
/**
* Efficient and speedy serialization codec fully
* compatible with JDK Serialization codec.
*
* https://github.com/RuedigerMoeller/fast-serialization
*
* @author Nikita Koksharov
*
*/
public class FstCodec extends BaseCodec {
static class FSTDefaultStreamCoderFactory implements FSTConfiguration.StreamCoderFactory {
Field chBufField;
Field ascStringCacheField;
{
try {
chBufField = FSTStreamDecoder.class.getDeclaredField("chBufS");
ascStringCacheField = FSTStreamDecoder.class.getDeclaredField("ascStringCache");
} catch (Exception e) {
throw new IllegalStateException(e);
}
ascStringCacheField.setAccessible(true);
chBufField.setAccessible(true);
}
private FSTConfiguration fstConfiguration;
FSTDefaultStreamCoderFactory(FSTConfiguration fstConfiguration) {
this.fstConfiguration = fstConfiguration;
}
@Override
public FSTEncoder createStreamEncoder() {
return new FSTStreamEncoder(fstConfiguration);
}
@Override
public FSTDecoder createStreamDecoder() {
return new FSTStreamDecoder(fstConfiguration) {
public String readStringUTF() throws IOException {
try {
String res = super.readStringUTF();
chBufField.set(this, null);
return res;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public String readStringAsc() throws IOException {
try {
String res = super.readStringAsc();
ascStringCacheField.set(this, null);
return res;
} catch (Exception e) {
throw new IOException(e);
}
}
};
}
static ThreadLocal input = new ThreadLocal();
static ThreadLocal output = new ThreadLocal();
@Override
public ThreadLocal getInput() {
return input;
}
@Override
public ThreadLocal getOutput() {
return output;
}
}
private final FSTConfiguration config;
public FstCodec() {
this(FSTConfiguration.createDefaultConfiguration());
}
public FstCodec(ClassLoader classLoader) {
this(createConfig(classLoader));
}
public FstCodec(ClassLoader classLoader, FstCodec codec) {
this(copy(classLoader, codec));
}
private static FSTConfiguration copy(ClassLoader classLoader, FstCodec codec) {
FSTConfiguration def = FSTConfiguration.createDefaultConfiguration();
def.setClassLoader(classLoader);
def.setCoderSpecific(codec.config.getCoderSpecific());
def.setCrossPlatform(codec.config.isCrossPlatform());
def.setForceClzInit(codec.config.isForceClzInit());
def.setForceSerializable(codec.config.isForceSerializable());
def.setInstantiator(codec.config.getInstantiator(null));
def.setName(codec.config.getName());
def.setPreferSpeed(codec.config.isPreferSpeed());
def.setShareReferences(codec.config.isShareReferences());
def.setStreamCoderFactory(codec.config.getStreamCoderFactory());
def.setVerifier(codec.config.getVerifier());
return def;
}
private static FSTConfiguration createConfig(ClassLoader classLoader) {
FSTConfiguration def = FSTConfiguration.createDefaultConfiguration();
def.setClassLoader(classLoader);
return def;
}
public FstCodec(FSTConfiguration fstConfiguration) {
config = fstConfiguration;
config.setStreamCoderFactory(new FSTDefaultStreamCoderFactory(config));
}
private final Decoder<Object> decoder = new Decoder<Object>() {
@Override
public Object decode(ByteBuf buf, State state) throws IOException {
ByteBufInputStream in = new ByteBufInputStream(buf);
FSTObjectInput inputStream = config.getObjectInput(in);
try {
return inputStream.readObject();
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException(e);
// } finally {
// inputStream.resetForReuseUseArray(empty);
}
}
};
private final Encoder encoder = new Encoder() {
@Override
public ByteBuf encode(Object in) throws IOException {
ByteBuf out = ByteBufAllocator.DEFAULT.buffer();
ByteBufOutputStream os = new ByteBufOutputStream(out);
FSTObjectOutput oos = config.getObjectOutput(os);
try {
oos.writeObject(in);
oos.flush();
return os.buffer();
} catch (IOException e) {
out.release();
throw e;
} catch (Exception e) {
out.release();
throw new IOException(e);
// } finally {
// oos.resetForReUse(empty);
}
}
};
@Override
public Decoder<Object> getValueDecoder() {
return decoder;
}
@Override
public Encoder getValueEncoder() {
return encoder;
}
@Override
public ClassLoader getClassLoader() {
if (config.getClassLoader() != null) {
return config.getClassLoader();
}
return super.getClassLoader();
}
}
| redisson/src/main/java/org/redisson/codec/FstCodec.java | /**
* Copyright (c) 2013-2019 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.codec;
import java.io.IOException;
import java.lang.reflect.Field;
import org.nustaq.serialization.FSTConfiguration;
import org.nustaq.serialization.FSTDecoder;
import org.nustaq.serialization.FSTEncoder;
import org.nustaq.serialization.FSTObjectInput;
import org.nustaq.serialization.FSTObjectOutput;
import org.nustaq.serialization.coders.FSTStreamDecoder;
import org.nustaq.serialization.coders.FSTStreamEncoder;
import org.redisson.client.codec.BaseCodec;
import org.redisson.client.handler.State;
import org.redisson.client.protocol.Decoder;
import org.redisson.client.protocol.Encoder;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
/**
* Efficient and speedy serialization codec fully
* compatible with JDK Serialization codec.
*
* https://github.com/RuedigerMoeller/fast-serialization
*
* @author Nikita Koksharov
*
*/
public class FstCodec extends BaseCodec {
static class FSTDefaultStreamCoderFactory implements FSTConfiguration.StreamCoderFactory {
Field chBufField;
Field ascStringCacheField;
{
try {
chBufField = FSTStreamDecoder.class.getDeclaredField("chBufS");
ascStringCacheField = FSTStreamDecoder.class.getDeclaredField("ascStringCache");
} catch (Exception e) {
throw new IllegalStateException(e);
}
ascStringCacheField.setAccessible(true);
chBufField.setAccessible(true);
}
private FSTConfiguration fstConfiguration;
FSTDefaultStreamCoderFactory(FSTConfiguration fstConfiguration) {
this.fstConfiguration = fstConfiguration;
}
@Override
public FSTEncoder createStreamEncoder() {
return new FSTStreamEncoder(fstConfiguration);
}
@Override
public FSTDecoder createStreamDecoder() {
return new FSTStreamDecoder(fstConfiguration) {
public String readStringUTF() throws IOException {
try {
String res = super.readStringUTF();
chBufField.set(this, null);
return res;
} catch (Exception e) {
throw new IOException(e);
}
}
@Override
public String readStringAsc() throws IOException {
try {
String res = super.readStringAsc();
ascStringCacheField.set(this, null);
return res;
} catch (Exception e) {
throw new IOException(e);
}
}
};
}
static ThreadLocal input = new ThreadLocal();
static ThreadLocal output = new ThreadLocal();
@Override
public ThreadLocal getInput() {
return input;
}
@Override
public ThreadLocal getOutput() {
return output;
}
}
private final FSTConfiguration config;
public FstCodec() {
this(FSTConfiguration.createDefaultConfiguration());
}
public FstCodec(ClassLoader classLoader) {
this(createConfig(classLoader));
}
public FstCodec(ClassLoader classLoader, FstCodec codec) {
this(copy(classLoader, codec));
}
private static FSTConfiguration copy(ClassLoader classLoader, FstCodec codec) {
FSTConfiguration def = FSTConfiguration.createDefaultConfiguration();
def.setClassLoader(classLoader);
def.setCoderSpecific(codec.config.getCoderSpecific());
def.setCrossPlatform(codec.config.isCrossPlatform());
def.setForceClzInit(codec.config.isForceClzInit());
def.setForceSerializable(codec.config.isForceSerializable());
def.setInstantiator(codec.config.getInstantiator(null));
def.setName(codec.config.getName());
def.setPreferSpeed(codec.config.isPreferSpeed());
def.setShareReferences(codec.config.isShareReferences());
def.setStreamCoderFactory(codec.config.getStreamCoderFactory());
def.setVerifier(codec.config.getVerifier());
return def;
}
private static FSTConfiguration createConfig(ClassLoader classLoader) {
FSTConfiguration def = FSTConfiguration.createDefaultConfiguration();
def.setClassLoader(classLoader);
return def;
}
public FstCodec(FSTConfiguration fstConfiguration) {
config = fstConfiguration;
config.setShareReferences(false);
config.setStreamCoderFactory(new FSTDefaultStreamCoderFactory(config));
}
private final Decoder<Object> decoder = new Decoder<Object>() {
@Override
public Object decode(ByteBuf buf, State state) throws IOException {
ByteBufInputStream in = new ByteBufInputStream(buf);
FSTObjectInput inputStream = config.getObjectInput(in);
try {
return inputStream.readObject();
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException(e);
// } finally {
// inputStream.resetForReuseUseArray(empty);
}
}
};
private final Encoder encoder = new Encoder() {
@Override
public ByteBuf encode(Object in) throws IOException {
ByteBuf out = ByteBufAllocator.DEFAULT.buffer();
ByteBufOutputStream os = new ByteBufOutputStream(out);
FSTObjectOutput oos = config.getObjectOutput(os);
try {
oos.writeObject(in);
oos.flush();
return os.buffer();
} catch (IOException e) {
out.release();
throw e;
} catch (Exception e) {
out.release();
throw new IOException(e);
// } finally {
// oos.resetForReUse(empty);
}
}
};
@Override
public Decoder<Object> getValueDecoder() {
return decoder;
}
@Override
public Encoder getValueEncoder() {
return encoder;
}
@Override
public ClassLoader getClassLoader() {
if (config.getClassLoader() != null) {
return config.getClassLoader();
}
return super.getClassLoader();
}
}
| Fixed - FSTCodec memory leak #1927
| redisson/src/main/java/org/redisson/codec/FstCodec.java | Fixed - FSTCodec memory leak #1927 | <ide><path>edisson/src/main/java/org/redisson/codec/FstCodec.java
<ide>
<ide> public FstCodec(FSTConfiguration fstConfiguration) {
<ide> config = fstConfiguration;
<del> config.setShareReferences(false);
<ide> config.setStreamCoderFactory(new FSTDefaultStreamCoderFactory(config));
<ide> }
<ide> |
|
Java | mit | f0eea2892d7a994aca9c4001e4a63d63f0f67d29 | 0 | michaelahlers/embedded-phantom | package ahlers.phantom.embedded;
import com.google.common.base.Optional;
import de.flapdoodle.embed.process.config.store.FileSet;
import de.flapdoodle.embed.process.config.store.FileType;
import de.flapdoodle.embed.process.config.store.IPackageResolver;
import de.flapdoodle.embed.process.distribution.ArchiveType;
import de.flapdoodle.embed.process.distribution.Distribution;
import static de.flapdoodle.embed.process.distribution.ArchiveType.TBZ2;
import static de.flapdoodle.embed.process.distribution.ArchiveType.ZIP;
import static de.flapdoodle.embed.process.distribution.Platform.Linux;
/**
* Translates components of {@link Distribution} into literal components of PhantomJS's distributed files.
*
* @author [[mailto:[email protected] Michael Ahlers]]
* @see <a href="http://phantomjs.org/download.html">PhantomJS: Download</a>
*/
public enum PhantomPackageResolver
implements IPackageResolver {
INSTANCE;
public static PhantomPackageResolver getInstance() {
return INSTANCE;
}
public static ArchiveType archiveTypeFor(final Distribution distribution) {
switch (distribution.getPlatform()) {
case Linux:
return TBZ2;
case OS_X:
case Windows:
return ZIP;
default:
throw new UnsupportedPlatformException(distribution);
}
}
@Override
public ArchiveType getArchiveType(final Distribution distribution) {
return archiveTypeFor(distribution);
}
static String platformClassifierFor(final Distribution distribution) {
switch (distribution.getPlatform()) {
case Linux:
return "linux";
case OS_X:
return "macosx";
case Windows:
return "windows";
default:
throw new UnsupportedPlatformException(distribution);
}
}
static Optional<String> bitsizeClassifierFor(final Distribution distribution) {
if (Linux == distribution.getPlatform()) {
switch (distribution.getBitsize()) {
case B32:
return Optional.of("i686");
case B64:
return Optional.of("x86_64");
default:
throw new UnsupportedBitsizeException(distribution);
}
} else {
return Optional.absent();
}
}
public static String archiveFilenameFor(final Distribution distribution) {
final String version = distribution.getVersion().asInDownloadPath();
final String platformClassifier = platformClassifierFor(distribution);
final Optional<String> bitsizeClassifier = bitsizeClassifierFor(distribution);
return String.format("phantomjs-%s-%s%s", version, platformClassifier, bitsizeClassifier.isPresent() ? "-" + bitsizeClassifier.get() : "");
}
public static String archiveExtensionFor(final Distribution distribution, final ArchiveType archiveType) {
switch (archiveType) {
case TBZ2:
return "tar.bz2";
case ZIP:
return "zip";
default:
throw new UnsupportedArchiveException(distribution, archiveType);
}
}
public static String archiveExtensionFor(final Distribution distribution) {
final ArchiveType archiveType = archiveTypeFor(distribution);
return archiveExtensionFor(distribution, archiveType);
}
public static String archivePathFor(final Distribution distribution) {
final String filename = archiveFilenameFor(distribution);
final String extension = archiveExtensionFor(distribution);
return String.format("%s.%s", filename, extension);
}
@Override
public String getPath(final Distribution distribution) {
return archivePathFor(distribution);
}
@Override
public FileSet getFileSet(final Distribution distribution) {
final FileSet.Builder builder = FileSet.builder();
switch (distribution.getPlatform()) {
case Linux:
case OS_X:
builder.addEntry(FileType.Executable, "phantomjs");
break;
case Windows:
builder.addEntry(FileType.Executable, "phantomjs.exe");
break;
default:
throw new UnsupportedPlatformException(distribution);
}
return builder.build();
}
}
| src/main/java/ahlers/phantom/embedded/PhantomPackageResolver.java | package ahlers.phantom.embedded;
import com.google.common.base.Optional;
import de.flapdoodle.embed.process.config.store.FileSet;
import de.flapdoodle.embed.process.config.store.FileType;
import de.flapdoodle.embed.process.config.store.IPackageResolver;
import de.flapdoodle.embed.process.distribution.ArchiveType;
import de.flapdoodle.embed.process.distribution.Distribution;
import static de.flapdoodle.embed.process.distribution.ArchiveType.TBZ2;
import static de.flapdoodle.embed.process.distribution.ArchiveType.ZIP;
import static de.flapdoodle.embed.process.distribution.Platform.Linux;
/**
* Translates components of {@link Distribution} into literal components of PhantomJS's distributed files.
*
* @author [[mailto:[email protected] Michael Ahlers]]
* @see <a href="http://phantomjs.org/download.html">PhantomJS: Download</a>
*/
public enum PhantomPackageResolver
implements IPackageResolver {
INSTANCE;
public static PhantomPackageResolver getInstance() {
return INSTANCE;
}
public static ArchiveType archiveTypeFor(final Distribution distribution) {
switch (distribution.getPlatform()) {
case Linux:
return TBZ2;
case OS_X:
case Windows:
return ZIP;
default:
throw new UnsupportedPlatformException(distribution);
}
}
@Override
public ArchiveType getArchiveType(final Distribution distribution) {
return archiveTypeFor(distribution);
}
static String platformClassifierFor(final Distribution distribution) {
switch (distribution.getPlatform()) {
case Linux:
return "linux";
case OS_X:
return "macosx";
case Windows:
return "windows";
default:
throw new UnsupportedPlatformException(distribution);
}
}
static Optional<String> bitsizeClassifierFor(final Distribution distribution) {
if (Linux == distribution.getPlatform()) {
switch (distribution.getBitsize()) {
case B32:
return Optional.of("i686");
case B64:
return Optional.of("x86_64");
default:
throw new UnsupportedBitsizeException(distribution);
}
} else {
return Optional.absent();
}
}
public static String archiveFilenameFor(final Distribution distribution) {
final String version = distribution.getVersion().asInDownloadPath();
final String platformClassifier = platformClassifierFor(distribution);
final Optional<String> bitsizeClassifier = bitsizeClassifierFor(distribution);
return String.format("phantomjs-%s-%s%s", version, platformClassifier, bitsizeClassifier.isPresent() ? "-" + bitsizeClassifier.get() : "");
}
public static String archiveExtensionFor(final Distribution distribution) {
final ArchiveType archiveType = archiveTypeFor(distribution);
switch (archiveType) {
case TBZ2:
return "tar.bz2";
case ZIP:
return "zip";
default:
throw new UnsupportedArchiveException(distribution, archiveType);
}
}
@Override
public String getPath(final Distribution distribution) {
final String filename = archiveFilenameFor(distribution);
final String extension = archiveExtensionFor(distribution);
return String.format("%s.%s", filename, extension);
}
@Override
public FileSet getFileSet(final Distribution distribution) {
final FileSet.Builder builder = FileSet.builder();
switch (distribution.getPlatform()) {
case Linux:
case OS_X:
builder.addEntry(FileType.Executable, "phantomjs");
break;
case Windows:
builder.addEntry(FileType.Executable, "phantomjs.exe");
break;
default:
throw new UnsupportedPlatformException(distribution);
}
return builder.build();
}
}
| Overload for tests (for #17)
| src/main/java/ahlers/phantom/embedded/PhantomPackageResolver.java | Overload for tests (for #17) | <ide><path>rc/main/java/ahlers/phantom/embedded/PhantomPackageResolver.java
<ide> return String.format("phantomjs-%s-%s%s", version, platformClassifier, bitsizeClassifier.isPresent() ? "-" + bitsizeClassifier.get() : "");
<ide> }
<ide>
<del> public static String archiveExtensionFor(final Distribution distribution) {
<del> final ArchiveType archiveType = archiveTypeFor(distribution);
<del>
<add> public static String archiveExtensionFor(final Distribution distribution, final ArchiveType archiveType) {
<ide> switch (archiveType) {
<ide> case TBZ2:
<ide> return "tar.bz2";
<ide> }
<ide> }
<ide>
<del> @Override
<del> public String getPath(final Distribution distribution) {
<add> public static String archiveExtensionFor(final Distribution distribution) {
<add> final ArchiveType archiveType = archiveTypeFor(distribution);
<add> return archiveExtensionFor(distribution, archiveType);
<add> }
<add>
<add> public static String archivePathFor(final Distribution distribution) {
<ide> final String filename = archiveFilenameFor(distribution);
<ide> final String extension = archiveExtensionFor(distribution);
<ide> return String.format("%s.%s", filename, extension);
<add> }
<add>
<add> @Override
<add> public String getPath(final Distribution distribution) {
<add> return archivePathFor(distribution);
<ide> }
<ide>
<ide> @Override |
|
Java | epl-1.0 | a1e5af71c6f9e4cd1e90efb47c591919b19be0fc | 0 | rbevers/fitnesse,amolenaar/fitnesse,rbevers/fitnesse,rbevers/fitnesse,hansjoachim/fitnesse,jdufner/fitnesse,hansjoachim/fitnesse,jdufner/fitnesse,amolenaar/fitnesse,jdufner/fitnesse,amolenaar/fitnesse,hansjoachim/fitnesse | package fitnesse.wikitext.parser;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import util.Maybe;
public class Symbol {
public static final Maybe<Symbol> nothing = new Maybe<Symbol>();
public static final Symbol emptySymbol = new Symbol(SymbolType.Empty);
private static final List<Symbol> NO_CHILDREN = Collections.emptyList();
private SymbolType type;
private String content;
private List<Symbol> children;
private Map<String,String> variables;
private Map<String,String> properties;
public Symbol(SymbolType type) { this(type, ""); }
public Symbol(SymbolType type, String content) {
this.content = content;
this.type = type;
this.children = type.matchesFor(SymbolType.SymbolList)
? new ArrayList<Symbol>(2)
: NO_CHILDREN;
}
public SymbolType getType() { return type; }
public boolean isType(SymbolType type) { return this.type.matchesFor(type); }
public boolean isStartCell() { return isType(Table.symbolType) || isType(SymbolType.EndCell); }
public boolean isStartLine() { return isType(HorizontalRule.symbolType) || isType(Nesting.symbolType); }
public boolean isLineType() {
return isType(HeaderLine.symbolType) || isType(SymbolType.CenterLine) || isType(SymbolType.Meta) ||
isType(SymbolType.NoteLine);
}
public String getContent() { return content; }
public void setContent(String content) { this.content = content; }
public Symbol childAt(int index) { return getChildren().get(index); }
public Symbol lastChild() { return childAt(getChildren().size() - 1); }
public List<Symbol> getChildren() { return children; }
private List<Symbol> children() {
if (children == NO_CHILDREN) {
children = new ArrayList<Symbol>(1);
}
return children;
}
public Symbol addToFront(Symbol child) {
children().add(0, child);
return this;
}
public Symbol add(Symbol child) {
children().add(child);
return this;
}
public Symbol add(String text) {
children().add(new Symbol(SymbolType.Text, text));
return this;
}
public Symbol childrenAfter(int after) {
Symbol result = new Symbol(SymbolType.SymbolList);
for (int i = after + 1; i < children.size(); i++) result.add(children.get(i));
return result;
}
public boolean walkPostOrder(SymbolTreeWalker walker) {
if (walker.visitChildren(this)) {
for (Symbol child: children) {
if (!child.walkPostOrder(walker)) return false;
}
}
return walker.visit(this);
}
public boolean walkPreOrder(SymbolTreeWalker walker) {
if (!walker.visit(this)) return false;
if (walker.visitChildren(this)) {
for (Symbol child: children) {
if (!child.walkPreOrder(walker)) return false;
}
}
return true;
}
public void evaluateVariables(String[] names, VariableSource source) {
if (variables == null) variables = new HashMap<String,String>(names.length);
for (String name: names) {
Maybe<String> value = source.findVariable(name);
if (!value.isNothing()) variables.put(name, value.getValue());
}
}
public String getVariable(String name, String defaultValue) {
return variables != null && variables.containsKey(name) ? variables.get(name) : defaultValue;
}
public Symbol putProperty(String key, String value) {
if (properties == null) properties = new HashMap<String,String>(1);
properties.put(key, value);
return this;
}
public boolean hasProperty(String key) {
return properties != null && properties.containsKey(key);
}
public String getProperty(String key, String defaultValue) {
return properties != null && properties.containsKey(key) ? properties.get(key) : defaultValue;
}
public String getProperty(String key) {
return getProperty(key, "");
}
public SymbolType closeType() {
return type == SymbolType.OpenBrace ? SymbolType.CloseBrace
: type == SymbolType.OpenBracket ? SymbolType.CloseBracket
: type == SymbolType.OpenParenthesis ? SymbolType.CloseParenthesis
: type == Literal.symbolType ? SymbolType.CloseLiteral
: type == Comment.symbolType ? SymbolType.Newline
: SymbolType.Empty;
}
}
| src/fitnesse/wikitext/parser/Symbol.java | package fitnesse.wikitext.parser;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import util.Maybe;
public class Symbol {
public static final Maybe<Symbol> nothing = new Maybe<Symbol>();
public static final Symbol emptySymbol = new Symbol(SymbolType.Empty);
private static final List<Symbol> NO_CHILDREN = Collections.emptyList();
private SymbolType type;
private String content;
private List<Symbol> children;
private Properties variables;
private Properties properties;
public Symbol(SymbolType type) { this(type, ""); }
public Symbol(SymbolType type, String content) {
this.content = content;
this.type = type;
this.children = type.matchesFor(SymbolType.SymbolList)
? new ArrayList<Symbol>(2)
: NO_CHILDREN;
}
public SymbolType getType() { return type; }
public boolean isType(SymbolType type) { return this.type.matchesFor(type); }
public boolean isStartCell() { return isType(Table.symbolType) || isType(SymbolType.EndCell); }
public boolean isStartLine() { return isType(HorizontalRule.symbolType) || isType(Nesting.symbolType); }
public boolean isLineType() {
return isType(HeaderLine.symbolType) || isType(SymbolType.CenterLine) || isType(SymbolType.Meta) ||
isType(SymbolType.NoteLine);
}
public String getContent() { return content; }
public void setContent(String content) { this.content = content; }
public Symbol childAt(int index) { return getChildren().get(index); }
public Symbol lastChild() { return childAt(getChildren().size() - 1); }
public List<Symbol> getChildren() { return children; }
private List<Symbol> children() {
if (children == NO_CHILDREN) {
children = new ArrayList<Symbol>(1);
}
return children;
}
public Symbol addToFront(Symbol child) {
children().add(0, child);
return this;
}
public Symbol add(Symbol child) {
children().add(child);
return this;
}
public Symbol add(String text) {
children().add(new Symbol(SymbolType.Text, text));
return this;
}
public Symbol childrenAfter(int after) {
Symbol result = new Symbol(SymbolType.SymbolList);
for (int i = after + 1; i < children.size(); i++) result.add(children.get(i));
return result;
}
public boolean walkPostOrder(SymbolTreeWalker walker) {
if (walker.visitChildren(this)) {
for (Symbol child: children) {
if (!child.walkPostOrder(walker)) return false;
}
}
return walker.visit(this);
}
public boolean walkPreOrder(SymbolTreeWalker walker) {
if (!walker.visit(this)) return false;
if (walker.visitChildren(this)) {
for (Symbol child: children) {
if (!child.walkPreOrder(walker)) return false;
}
}
return true;
}
public void evaluateVariables(String[] names, VariableSource source) {
if (variables == null) variables = new Properties();
for (String name: names) {
Maybe<String> value = source.findVariable(name);
if (!value.isNothing()) variables.put(name, value.getValue());
}
}
public String getVariable(String name, String defaultValue) {
return variables != null && variables.containsKey(name) ? variables.getProperty(name) : defaultValue;
}
public Symbol putProperty(String key, String value) {
if (properties == null) properties = new Properties();
properties.put(key, value);
return this;
}
public boolean hasProperty(String key) {
return properties != null && properties.containsKey(key);
}
public String getProperty(String key, String defaultValue) {
return properties != null && properties.containsKey(key) ? properties.getProperty(key) : defaultValue;
}
public String getProperty(String key) {
return getProperty(key, "");
}
public SymbolType closeType() {
return type == SymbolType.OpenBrace ? SymbolType.CloseBrace
: type == SymbolType.OpenBracket ? SymbolType.CloseBracket
: type == SymbolType.OpenParenthesis ? SymbolType.CloseParenthesis
: type == Literal.symbolType ? SymbolType.CloseLiteral
: type == Comment.symbolType ? SymbolType.Newline
: SymbolType.Empty;
}
}
| Use unsynchronized HashMap instead of synchronized Properties
Avoid performance penalty because of inherent macro syncronization.
| src/fitnesse/wikitext/parser/Symbol.java | Use unsynchronized HashMap instead of synchronized Properties | <ide><path>rc/fitnesse/wikitext/parser/Symbol.java
<ide>
<ide> import java.util.ArrayList;
<ide> import java.util.Collections;
<add>import java.util.HashMap;
<ide> import java.util.List;
<del>import java.util.Properties;
<add>import java.util.Map;
<ide>
<ide> import util.Maybe;
<ide>
<ide> private SymbolType type;
<ide> private String content;
<ide> private List<Symbol> children;
<del> private Properties variables;
<del> private Properties properties;
<add> private Map<String,String> variables;
<add> private Map<String,String> properties;
<ide>
<ide> public Symbol(SymbolType type) { this(type, ""); }
<ide>
<ide> }
<ide>
<ide> public void evaluateVariables(String[] names, VariableSource source) {
<del> if (variables == null) variables = new Properties();
<add> if (variables == null) variables = new HashMap<String,String>(names.length);
<ide> for (String name: names) {
<ide> Maybe<String> value = source.findVariable(name);
<ide> if (!value.isNothing()) variables.put(name, value.getValue());
<ide> }
<ide>
<ide> public String getVariable(String name, String defaultValue) {
<del> return variables != null && variables.containsKey(name) ? variables.getProperty(name) : defaultValue;
<add> return variables != null && variables.containsKey(name) ? variables.get(name) : defaultValue;
<ide> }
<ide>
<ide> public Symbol putProperty(String key, String value) {
<del> if (properties == null) properties = new Properties();
<add> if (properties == null) properties = new HashMap<String,String>(1);
<ide> properties.put(key, value);
<ide> return this;
<ide> }
<ide> }
<ide>
<ide> public String getProperty(String key, String defaultValue) {
<del> return properties != null && properties.containsKey(key) ? properties.getProperty(key) : defaultValue;
<add> return properties != null && properties.containsKey(key) ? properties.get(key) : defaultValue;
<ide> }
<ide>
<ide> public String getProperty(String key) { |
|
Java | apache-2.0 | 3008d5d34017addd7b265df5e018205b95d94122 | 0 | kidaa/jena,kidaa/jena,apache/jena,CesarPantoja/jena,apache/jena,apache/jena,kidaa/jena,samaitra/jena,samaitra/jena,kidaa/jena,samaitra/jena,apache/jena,kidaa/jena,CesarPantoja/jena,kidaa/jena,samaitra/jena,samaitra/jena,apache/jena,CesarPantoja/jena,kidaa/jena,samaitra/jena,CesarPantoja/jena,apache/jena,apache/jena,CesarPantoja/jena,CesarPantoja/jena,CesarPantoja/jena,apache/jena,samaitra/jena | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.modify;
import static org.apache.jena.sparql.modify.TemplateLib.remapDefaultGraph ;
import static org.apache.jena.sparql.modify.TemplateLib.template ;
import java.util.ArrayList ;
import java.util.Collection ;
import java.util.Iterator ;
import java.util.List ;
import org.apache.jena.atlas.data.BagFactory ;
import org.apache.jena.atlas.data.DataBag ;
import org.apache.jena.atlas.data.ThresholdPolicy ;
import org.apache.jena.atlas.data.ThresholdPolicyFactory ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.atlas.lib.Pair ;
import org.apache.jena.atlas.lib.Sink ;
import org.apache.jena.atlas.web.TypedInputStream ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.GraphUtil ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryExecutionFactory ;
import org.apache.jena.riot.Lang ;
import org.apache.jena.riot.RDFDataMgr ;
import org.apache.jena.riot.RDFLanguages ;
import org.apache.jena.riot.system.SerializationFactoryFinder ;
import org.apache.jena.riot.system.StreamRDF ;
import org.apache.jena.riot.system.StreamRDFLib ;
import org.apache.jena.sparql.ARQInternalErrorException ;
import org.apache.jena.sparql.SystemARQ ;
import org.apache.jena.sparql.core.* ;
import org.apache.jena.sparql.engine.Plan ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.binding.BindingRoot ;
import org.apache.jena.sparql.graph.GraphFactory ;
import org.apache.jena.sparql.graph.GraphOps ;
import org.apache.jena.sparql.graph.NodeTransform;
import org.apache.jena.sparql.graph.NodeTransformLib ;
import org.apache.jena.sparql.modify.request.* ;
import org.apache.jena.sparql.syntax.Element ;
import org.apache.jena.sparql.syntax.ElementGroup ;
import org.apache.jena.sparql.syntax.ElementNamedGraph ;
import org.apache.jena.sparql.syntax.ElementTriplesBlock ;
import org.apache.jena.sparql.util.Context ;
import org.apache.jena.update.UpdateException ;
/** Implementation of general purpose update request execution */
public class UpdateEngineWorker implements UpdateVisitor
{
protected final DatasetGraph datasetGraph ;
protected final boolean alwaysSilent = true ;
protected final Binding inputBinding; // Used for UpdateModify only
protected final Context context ;
public UpdateEngineWorker(DatasetGraph datasetGraph, Binding inputBinding, Context context) {
this.datasetGraph = datasetGraph ;
this.inputBinding = inputBinding ;
this.context = context ;
}
@Override
public void visit(UpdateDrop update)
{ execDropClear(update, false) ; }
@Override
public void visit(UpdateClear update)
{ execDropClear(update, true) ; }
protected void execDropClear(UpdateDropClear update, boolean isClear) {
if ( update.isAll() ) {
execDropClear(update, null, true); // Always clear.
execDropClearAllNamed(update, isClear);
} else if ( update.isAllNamed() )
execDropClearAllNamed(update, isClear);
else if ( update.isDefault() )
execDropClear(update, null, true);
else if ( update.isOneGraph() )
execDropClear(update, update.getGraph(), isClear);
else
throw new ARQInternalErrorException("Target is undefined: " + update.getTarget());
}
protected void execDropClear(UpdateDropClear update, Node g, boolean isClear) {
if ( !alwaysSilent ) {
if ( g != null && !datasetGraph.containsGraph(g) && !update.isSilent() )
error("No such graph: " + g);
}
if ( isClear ) {
if ( g == null || datasetGraph.containsGraph(g) )
graph(datasetGraph, g).clear();
} else
datasetGraph.removeGraph(g);
}
protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear) {
// Avoid ConcurrentModificationException
List<Node> list = Iter.toList(datasetGraph.listGraphNodes());
for ( Node gn : list )
execDropClear(update, gn, isClear);
}
@Override
public void visit(UpdateCreate update) {
Node g = update.getGraph();
if ( g == null )
return;
if ( datasetGraph.containsGraph(g) ) {
if ( !alwaysSilent && !update.isSilent() )
error("Graph store already contains graph : " + g);
return;
}
// In-memory specific
datasetGraph.addGraph(g, GraphFactory.createDefaultGraph());
}
@Override
public void visit(UpdateLoad update) {
// LOAD SILENT? iri ( INTO GraphRef )?
String source = update.getSource();
Node dest = update.getDest();
try {
// Read into temporary storage to protect against parse errors.
TypedInputStream s = RDFDataMgr.open(source);
Lang lang = RDFDataMgr.determineLang(source, s.getContentType(), null);
if ( RDFLanguages.isTriples(lang) ) {
// Triples
Graph g = GraphFactory.createGraphMem();
StreamRDF stream = StreamRDFLib.graph(g);
RDFDataMgr.parse(stream, s, source);
Graph g2 = graph(datasetGraph, dest);
GraphUtil.addInto(g2, g);
} else {
// Quads
if ( dest != null )
throw new UpdateException("Attempt to load quads into a graph");
DatasetGraph dsg = DatasetGraphFactory.create();
StreamRDF stream = StreamRDFLib.dataset(dsg);
RDFDataMgr.parse(stream, s, source);
Iterator<Quad> iter = dsg.find();
for ( ; iter.hasNext() ; ) {
Quad q = iter.next();
datasetGraph.add(q);
}
}
}
catch (RuntimeException ex) {
if ( !update.getSilent() ) {
if ( ex instanceof UpdateException )
throw (UpdateException)ex;
throw new UpdateException("Failed to LOAD '" + source + "'", ex);
}
}
}
@Override
public void visit(UpdateAdd update) {
// ADD SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
if ( !validBinaryGraphOp(update) )
return;
if ( update.getSrc().equals(update.getDest()) )
return;
// Different source and destination.
gsAddTriples(datasetGraph, update.getSrc(), update.getDest());
}
@Override
public void visit(UpdateCopy update) {
// COPY SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
if ( !validBinaryGraphOp(update) )
return;
if ( update.getSrc().equals(update.getDest()) )
return;
gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent());
}
@Override
public void visit(UpdateMove update) {
// MOVE SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
if ( !validBinaryGraphOp(update) )
return;
if ( update.getSrc().equals(update.getDest()) )
return;
// MOVE (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
// Difefrent source and destination.
gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent());
gsDrop(datasetGraph, update.getSrc(), true);
}
private boolean validBinaryGraphOp(UpdateBinaryOp update) {
if ( update.getSrc().isDefault() )
return true;
if ( update.getSrc().isOneNamedGraph() ) {
Node gn = update.getSrc().getGraph();
if ( !datasetGraph.containsGraph(gn) ) {
if ( !update.getSilent() )
error("No such graph: " + gn);
return false;
}
return true;
}
error("Invalid source target for oepration; " + update.getSrc());
return false;
}
// ----
// Core operations
/** Copy from src to dst : copy overwrites (= deletes) the old contents */
protected static void gsCopy(DatasetGraph dsg, Target src, Target dest, boolean isSilent)
{
if ( dest.equals(src) )
return ;
gsClear(dsg, dest, true) ;
gsAddTriples(dsg, src, dest) ;
}
/** Add triples from src to dest */
protected static void gsAddTriples(DatasetGraph dsg, Target src, Target dest) {
Graph gSrc = graph(dsg, src);
Graph gDest = graph(dsg, dest);
// Avoids concurrency problems by reading fully before writing
ThresholdPolicy<Triple> policy = ThresholdPolicyFactory.policyFromContext(dsg.getContext());
DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory());
try {
Iterator<Triple> triples = gSrc.find(null, null, null);
db.addAll(triples);
Iter.close(triples);
GraphOps.addAll(gDest, db.iterator());
}
finally {
db.close();
}
}
/** Clear target */
protected static void gsClear(DatasetGraph dsg, Target target, boolean isSilent) {
// No create - we tested earlier.
Graph g = graph(dsg, target);
g.clear();
}
/** Remove the target graph */
protected static void gsDrop(DatasetGraph dsg, Target target, boolean isSilent) {
if ( target.isDefault() )
dsg.getDefaultGraph().clear();
else
dsg.removeGraph(target.getGraph());
}
// ----
@Override
public Sink<Quad> createInsertDataSink() {
return new Sink<Quad>() {
@Override
public void send(Quad quad) {
addTodatasetGraph(datasetGraph, quad);
}
@Override
public void flush() {
SystemARQ.sync(datasetGraph);
}
@Override
public void close() {}
};
}
@Override
public void visit(UpdateDataInsert update) {
for ( Quad quad : update.getQuads() )
addTodatasetGraph(datasetGraph, quad);
}
@Override
public Sink<Quad> createDeleteDataSink() {
return new Sink<Quad>() {
@Override
public void send(Quad quad) {
deleteFromDatasetGraph(datasetGraph, quad);
}
@Override
public void flush() {
SystemARQ.sync(datasetGraph);
}
@Override
public void close() {}
};
}
@Override
public void visit(UpdateDataDelete update) {
for ( Quad quad : update.getQuads() )
deleteFromDatasetGraph(datasetGraph, quad);
}
@Override
public void visit(UpdateDeleteWhere update) {
List<Quad> quads = update.getQuads() ;
// Removed from SPARQL : Convert bNodes to named variables first.
//quads = convertBNodesToVariables(quads) ;
// Convert quads to a pattern.
Element el = elementFromQuads(quads) ;
// Decided to serialize the bindings, but could also have decided to
// serialize the quads after applying the template instead.
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory());
try {
Iterator<Binding> bindings = evalBindings(el, null);
db.addAll(bindings);
Iter.close(bindings);
Iterator<Binding> it = db.iterator();
execDelete(datasetGraph, quads, null, it);
Iter.close(it);
}
finally {
db.close();
}
}
@Override
public void visit(UpdateModify update) {
Node withGraph = update.getWithIRI();
Element elt = update.getWherePattern();
// null or a dataset for USING clause.
// USING/USING NAMED
DatasetGraph dsg = processUsing(update);
// -------------------
// WITH
// USING overrides WITH
if ( dsg == null && withGraph != null ) {
if ( false ) {
// Subtle difference : WITH <uri>... WHERE {}
// and an empty/unknown graph <uri>
// rewrite with GRAPH -> no match.
// redo as dataset with different default graph -> match
// SPARQL is unclear abotu what happens when the graph does not exist.
// The rewite means the raw query engine is used though.
// Ye Olde way - create a special dataset
dsg = processWith(update) ;
withGraph = null ;
}
else
// Better,
// Wrap WHERE clause in GRAPH <with_uri>
// and can remove DatasetGraphAltDefaultGraph,
// or at least comment its implications.
elt = new ElementNamedGraph(withGraph, elt) ;
}
// WITH :
// The quads from deletion/insertion are altered when streamed
// into the templates later on.
// -------------------
if ( dsg == null )
dsg = datasetGraph ;
Query query = elementToQuery(elt) ;
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ;
try {
Iterator<Binding> bindings = evalBindings(query, dsg, inputBinding, context);
if ( false ) {
List<Binding> x = Iter.toList(bindings);
System.out.printf("====>> Bindings (%d)\n", x.size());
Iter.print(System.out, x.iterator());
System.out.println("====<<");
bindings = Iter.iter(x);
}
db.addAll(bindings);
Iter.close(bindings);
Iterator<Binding> it = db.iterator();
execDelete(dsg, update.getDeleteQuads(), withGraph, it);
Iter.close(it);
Iterator<Binding> it2 = db.iterator();
execInsert(dsg, update.getInsertQuads(), withGraph, it2);
Iter.close(it2);
}
finally {
db.close();
}
}
// Indirection for subsystems to support USING/USING NAMED.
protected DatasetGraph processUsing(UpdateModify update) {
if ( update.getUsing().size() == 0 && update.getUsingNamed().size() == 0 )
return null;
return DynamicDatasets.dynamicDataset(update.getUsing(), update.getUsingNamed(), datasetGraph, false);
}
protected DatasetGraph processWith(UpdateModify update) {
Node withGraph = update.getWithIRI();
if ( withGraph == null )
return null;
Graph g = graphOrDummy(datasetGraph, withGraph);
DatasetGraph dsg = new DatasetGraphAltDefaultGraph(datasetGraph, g);
return dsg;
}
private Graph graphOrDummy(DatasetGraph dsg, Node gn) {
Graph g = graph(datasetGraph, gn);
if ( g == null )
g = GraphFactory.createGraphMem();
return g;
}
protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads) {
NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables();
return NodeTransformLib.transformQuads(bnodesToVariables, quads);
}
protected Element elementFromQuads(List<Quad> quads) {
ElementGroup el = new ElementGroup();
ElementTriplesBlock x = new ElementTriplesBlock();
// Maybe empty??
el.addElement(x);
Node g = Quad.defaultGraphNodeGenerated;
for ( Quad q : quads ) {
if ( q.getGraph() != g ) {
g = q.getGraph();
x = new ElementTriplesBlock();
if ( g == null || g == Quad.defaultGraphNodeGenerated )
el.addElement(x);
else {
ElementNamedGraph eng = new ElementNamedGraph(g, x);
el.addElement(eng);
}
}
x.addTriple(q.asTriple());
}
return el;
}
// JENA-1059 : optimization : process templates for ground triples and do these once.
// execDelete ; execInsert
// Quads involving only IRIs and literals do not change from binding to
// binding so any inserts, rather than repeatedly if they are going to be
// done at all. Note bNodes (if legal at this point) change from template
// instantiation to instantiation.
/**
* Split quads into ground terms (no variables) and templated quads.
* @param quads
* @return Pair of (ground quads, templated quads)
*/
private static Pair<List<Quad>, List<Quad>> split(Collection<Quad> quads) {
// Guess size.
// Pre-size in case large (i.e. 10K+).
List<Quad> constQuads = new ArrayList<>(quads.size()) ;
// ... in which case we assume the templated triples are small / non-existent.
List<Quad> templateQuads = new ArrayList<>() ;
quads.forEach((q)-> {
if ( constQuad(q))
constQuads.add(q) ;
else
templateQuads.add(q) ;
}) ;
return Pair.create(constQuads, templateQuads);
}
private static boolean constQuad(Quad quad) {
return constTerm(quad.getGraph()) && constTerm(quad.getSubject()) &&
constTerm(quad.getPredicate()) && constTerm(quad.getObject());
}
private static boolean constTerm(Node n) {
return n.isURI() || n.isLiteral() ;
}
protected static void execDelete(DatasetGraph dsg, List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) {
Pair<List<Quad>, List<Quad>> p = split(quads) ;
execDelete(dsg, p.getLeft(), p.getRight(), dftGraph, bindings) ;
}
protected static void execDelete(DatasetGraph dsg, List<Quad> onceQuads, List<Quad> templateQuads, Node dftGraph, Iterator<Binding> bindings) {
if ( onceQuads != null && bindings.hasNext() ) {
onceQuads = remapDefaultGraph(onceQuads, dftGraph) ;
onceQuads.forEach(q->deleteFromDatasetGraph(dsg, q)) ;
}
Iterator<Quad> it = template(templateQuads, dftGraph, bindings) ;
if ( it == null )
return ;
it.forEachRemaining(q->deleteFromDatasetGraph(dsg, q)) ;
}
protected static void execInsert(DatasetGraph dsg, List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) {
Pair<List<Quad>, List<Quad>> p = split(quads) ;
execInsert(dsg, p.getLeft(), p.getRight(), dftGraph, bindings) ;
}
protected static void execInsert(DatasetGraph dsg, List<Quad> onceQuads, List<Quad> templateQuads, Node dftGraph, Iterator<Binding> bindings) {
if ( onceQuads != null && bindings.hasNext() ) {
onceQuads = remapDefaultGraph(onceQuads, dftGraph) ;
onceQuads.forEach((q)->addTodatasetGraph(dsg, q)) ;
}
Iterator<Quad> it = template(templateQuads, dftGraph, bindings) ;
if ( it == null )
return ;
it.forEachRemaining((q)->addTodatasetGraph(dsg, q)) ;
}
// Catch all individual adds of quads
private static void addTodatasetGraph(DatasetGraph datasetGraph, Quad quad) {
// Check legal triple.
if ( quad.isLegalAsData() )
datasetGraph.add(quad);
// Else drop.
// Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ;
}
// Catch all individual deletes of quads
private static void deleteFromDatasetGraph(DatasetGraph datasetGraph, Quad quad) {
datasetGraph.delete(quad);
}
protected Query elementToQuery(Element pattern) {
if ( pattern == null )
return null;
Query query = new Query();
query.setQueryPattern(pattern);
query.setQuerySelectType();
query.setQueryResultStar(true);
query.setResultVars();
return query;
}
protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph) {
return evalBindings(elementToQuery(pattern), dftGraph);
}
protected Iterator<Binding> evalBindings(Query query, Node dftGraph) {
DatasetGraph dsg = datasetGraph;
if ( query != null ) {
if ( dftGraph != null ) {
Graph g = graphOrDummy(dsg, dftGraph);
dsg = new DatasetGraphAltDefaultGraph(dsg, g);
}
}
return evalBindings(query, dsg, inputBinding, context);
}
protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding inputBinding, Context context) {
// The UpdateProcessorBase already copied the context and made it safe
// ... but that's going to happen again :-(
Iterator<Binding> toReturn;
if ( query != null ) {
Plan plan = QueryExecutionFactory.createPlan(query, dsg, inputBinding, context);
toReturn = plan.iterator();
} else {
toReturn = Iter.singleton((null != inputBinding) ? inputBinding : BindingRoot.create());
}
return toReturn;
}
protected static Graph graph(DatasetGraph datasetGraph, Node gn) {
if ( gn == null || gn == Quad.defaultGraphNodeGenerated )
return datasetGraph.getDefaultGraph();
else
return datasetGraph.getGraph(gn);
}
protected static Graph graph(DatasetGraph datasetGraph, Target target) {
if ( target.isDefault() )
return datasetGraph.getDefaultGraph();
if ( target.isOneNamedGraph() )
return graph(datasetGraph, target.getGraph());
error("Target does not name one graph: " + target);
return null;
}
protected static void error(String msg) {
throw new UpdateException(msg);
}
}
| jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.modify;
import static org.apache.jena.sparql.modify.TemplateLib.remapDefaultGraph ;
import static org.apache.jena.sparql.modify.TemplateLib.template ;
import java.util.ArrayList ;
import java.util.Collection ;
import java.util.Iterator ;
import java.util.List ;
import org.apache.jena.atlas.data.BagFactory ;
import org.apache.jena.atlas.data.DataBag ;
import org.apache.jena.atlas.data.ThresholdPolicy ;
import org.apache.jena.atlas.data.ThresholdPolicyFactory ;
import org.apache.jena.atlas.iterator.Iter ;
import org.apache.jena.atlas.lib.Pair ;
import org.apache.jena.atlas.lib.Sink ;
import org.apache.jena.atlas.web.TypedInputStream ;
import org.apache.jena.graph.Graph ;
import org.apache.jena.graph.GraphUtil ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.query.Query ;
import org.apache.jena.query.QueryExecutionFactory ;
import org.apache.jena.riot.Lang ;
import org.apache.jena.riot.RDFDataMgr ;
import org.apache.jena.riot.RDFLanguages ;
import org.apache.jena.riot.system.SerializationFactoryFinder ;
import org.apache.jena.riot.system.StreamRDF ;
import org.apache.jena.riot.system.StreamRDFLib ;
import org.apache.jena.sparql.ARQInternalErrorException ;
import org.apache.jena.sparql.SystemARQ ;
import org.apache.jena.sparql.core.* ;
import org.apache.jena.sparql.engine.Plan ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.engine.binding.BindingRoot ;
import org.apache.jena.sparql.graph.GraphFactory ;
import org.apache.jena.sparql.graph.GraphOps ;
import org.apache.jena.sparql.graph.NodeTransform;
import org.apache.jena.sparql.graph.NodeTransformLib ;
import org.apache.jena.sparql.modify.request.* ;
import org.apache.jena.sparql.syntax.Element ;
import org.apache.jena.sparql.syntax.ElementGroup ;
import org.apache.jena.sparql.syntax.ElementNamedGraph ;
import org.apache.jena.sparql.syntax.ElementTriplesBlock ;
import org.apache.jena.sparql.util.Context ;
import org.apache.jena.update.UpdateException ;
/** Implementation of general purpose update request execution */
public class UpdateEngineWorker implements UpdateVisitor
{
protected final DatasetGraph datasetGraph ;
protected final boolean alwaysSilent = true ;
protected final Binding inputBinding; // Used for UpdateModify and UpdateDeleteWhere only
protected final Context context ;
public UpdateEngineWorker(DatasetGraph datasetGraph, Binding inputBinding, Context context)
{
this.datasetGraph = datasetGraph ;
this.inputBinding = inputBinding ;
this.context = context ;
}
@Override
public void visit(UpdateDrop update)
{ execDropClear(update, false) ; }
@Override
public void visit(UpdateClear update)
{ execDropClear(update, true) ; }
protected void execDropClear(UpdateDropClear update, boolean isClear)
{
if ( update.isAll() )
{
execDropClear(update, null, true) ; // Always clear.
execDropClearAllNamed(update, isClear) ;
}
else if ( update.isAllNamed() )
execDropClearAllNamed(update, isClear) ;
else if ( update.isDefault() )
execDropClear(update, null, true) ;
else if ( update.isOneGraph() )
execDropClear(update, update.getGraph(), isClear) ;
else
throw new ARQInternalErrorException("Target is undefined: "+update.getTarget()) ;
}
protected void execDropClear(UpdateDropClear update, Node g, boolean isClear)
{
if ( ! alwaysSilent )
{
if ( g != null && ! datasetGraph.containsGraph(g) && ! update.isSilent())
error("No such graph: "+g) ;
}
if ( isClear )
{
if ( g == null || datasetGraph.containsGraph(g) )
graph(datasetGraph, g).clear() ;
}
else
datasetGraph.removeGraph(g) ;
}
protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear)
{
// Avoid ConcurrentModificationException
List<Node> list = Iter.toList(datasetGraph.listGraphNodes()) ;
for ( Node gn : list )
execDropClear(update, gn, isClear) ;
}
@Override
public void visit(UpdateCreate update)
{
Node g = update.getGraph() ;
if ( g == null )
return ;
if ( datasetGraph.containsGraph(g) )
{
if ( ! alwaysSilent && ! update.isSilent() )
error("Graph store already contains graph : "+g) ;
return ;
}
// In-memory specific
datasetGraph.addGraph(g, GraphFactory.createDefaultGraph()) ;
}
@Override
public void visit(UpdateLoad update)
{
String source = update.getSource() ;
Node dest = update.getDest() ;
try {
// Read into temporary storage to protect against parse errors.
TypedInputStream s = RDFDataMgr.open(source) ;
Lang lang = RDFDataMgr.determineLang(source, s.getContentType(), null) ;
if ( RDFLanguages.isTriples(lang) ) {
// Triples
Graph g = GraphFactory.createGraphMem() ;
StreamRDF stream = StreamRDFLib.graph(g) ;
RDFDataMgr.parse(stream, s, source) ;
Graph g2 = graph(datasetGraph, dest) ;
GraphUtil.addInto(g2, g) ;
} else {
// Quads
if ( dest != null )
throw new UpdateException("Attempt to load quads into a graph") ;
DatasetGraph dsg = DatasetGraphFactory.create() ;
StreamRDF stream = StreamRDFLib.dataset(dsg) ;
RDFDataMgr.parse(stream, s, source) ;
Iterator<Quad> iter = dsg.find() ;
for ( ; iter.hasNext() ; )
{
Quad q = iter.next() ;
datasetGraph.add(q) ;
}
}
} catch (RuntimeException ex)
{
if ( ! update.getSilent() )
{
if ( ex instanceof UpdateException )
throw (UpdateException)ex ;
throw new UpdateException("Failed to LOAD '"+source+"'", ex) ;
}
}
}
@Override
public void visit(UpdateAdd update)
{
if ( ! validBinaryGraphOp(update) ) return ;
if ( update.getSrc().equals(update.getDest()) )
return ;
// ADD (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
// Different source and destination.
gsCopyTriples(datasetGraph, update.getSrc(), update.getDest()) ;
}
@Override
public void visit(UpdateCopy update)
{
if ( ! validBinaryGraphOp(update) ) return ;
if ( update.getSrc().equals(update.getDest()) )
return ;
// COPY (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent()) ;
}
@Override
public void visit(UpdateMove update)
{
if ( ! validBinaryGraphOp(update) ) return ;
if ( update.getSrc().equals(update.getDest()) )
return ;
// MOVE (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
        // Different source and destination.
gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent()) ;
gsDrop(datasetGraph, update.getSrc(), true) ;
}
private boolean validBinaryGraphOp(UpdateBinaryOp update)
{
if ( update.getSrc().isDefault() )
return true ;
if ( update.getSrc().isOneNamedGraph() )
{
Node gn = update.getSrc().getGraph() ;
if ( ! datasetGraph.containsGraph(gn) )
{
if ( ! update.getSilent() )
error("No such graph: "+gn) ;
return false ;
}
return true ;
}
error("Invalid source target for oepration; "+update.getSrc()) ;
return false ;
}
// ----
// Core operations
protected static void gsCopy(DatasetGraph dsg, Target src, Target dest, boolean isSilent)
{
if ( dest.equals(src) )
return ;
gsClear(dsg, dest, true) ;
gsCopyTriples(dsg, src, dest) ;
}
protected static void gsCopyTriples(DatasetGraph dsg, Target src, Target dest)
{
Graph gSrc = graph(dsg, src) ;
Graph gDest = graph(dsg, dest) ;
// Avoids concurrency problems by reading fully before writing
ThresholdPolicy<Triple> policy = ThresholdPolicyFactory.policyFromContext(dsg.getContext());
DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory()) ;
try
{
Iterator<Triple> triples = gSrc.find(null, null, null) ;
db.addAll(triples) ;
Iter.close(triples) ;
GraphOps.addAll(gDest, db.iterator()) ;
}
finally { db.close() ; }
}
protected static void gsClear(DatasetGraph dsg, Target target, boolean isSilent)
{
// No create - we tested earlier.
Graph g = graph(dsg, target) ;
g.clear() ;
}
protected static void gsDrop(DatasetGraph dsg, Target target, boolean isSilent)
{
if ( target.isDefault() )
dsg.getDefaultGraph().clear() ;
else
dsg.removeGraph(target.getGraph()) ;
}
// ----
@Override
public Sink<Quad> createInsertDataSink()
{
return new Sink<Quad>()
{
@Override
public void send(Quad quad)
{
addTodatasetGraph(datasetGraph, quad);
}
@Override
public void flush()
{
SystemARQ.sync(datasetGraph);
}
@Override
public void close()
{ }
};
}
@Override
public void visit(UpdateDataInsert update)
{
for ( Quad quad : update.getQuads() )
addTodatasetGraph(datasetGraph, quad) ;
}
@Override
public Sink<Quad> createDeleteDataSink()
{
return new Sink<Quad>()
{
@Override
public void send(Quad quad)
{
deleteFromdatasetGraph(datasetGraph, quad);
}
@Override
public void flush()
{
SystemARQ.sync(datasetGraph);
}
@Override
public void close()
{ }
};
}
@Override
public void visit(UpdateDataDelete update)
{
for ( Quad quad : update.getQuads() )
deleteFromdatasetGraph(datasetGraph, quad) ;
}
@Override
public void visit(UpdateDeleteWhere update)
{
List<Quad> quads = update.getQuads() ;
// Convert bNodes to named variables first.
// if ( false )
// // Removed from SPARQL
// quads = convertBNodesToVariables(quads) ;
// Convert quads to a pattern.
Element el = elementFromQuads(quads) ;
// Decided to serialize the bindings, but could also have decided to
// serialize the quads after applying the template instead.
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ;
try
{
Iterator<Binding> bindings = evalBindings(el, null) ;
db.addAll(bindings) ;
Iter.close(bindings) ;
Iterator<Binding> it = db.iterator() ;
execDelete(datasetGraph, quads, null, it) ;
Iter.close(it) ;
}
finally
{
db.close() ;
}
}
@Override
public void visit(UpdateModify update)
{
Node withGraph = update.getWithIRI() ;
Element elt = update.getWherePattern() ;
// null or a dataset for USING clause.
// USING/USING NAMED
DatasetGraph dsg = processUsing(update) ;
// -------------------
// WITH
// USING overrides WITH
if ( dsg == null && withGraph != null ) {
if ( false ) {
// Ye Olde way - create a special dataset
dsg = processWith(update) ;
withGraph = null ;
}
else
// Better,
// Wrap WHERE clause in GRAPH <with_uri>
// and can remove DatasetGraphAltDefaultGraph,
// or at least comment its implications.
elt = new ElementNamedGraph(withGraph, elt) ;
}
// WITH :
// The quads from deletion/insertion are altered when streamed
// into the templates later on.
// -------------------
if ( dsg == null )
dsg = datasetGraph ;
Query query = elementToQuery(elt) ;
ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ;
try
{
Iterator<Binding> bindings = evalBindings(query, dsg, inputBinding, context) ;
if ( false )
{
List<Binding> x = Iter.toList(bindings) ;
System.out.printf("====>> Bindings (%d)\n", x.size()) ;
Iter.print(System.out, x.iterator()) ;
System.out.println("====<<") ;
bindings = Iter.iter(x) ;
}
db.addAll(bindings) ;
Iter.close(bindings) ;
Iterator<Binding> it = db.iterator() ;
execDelete(dsg, update.getDeleteQuads(), withGraph, it) ;
Iter.close(it) ;
Iterator<Binding> it2 = db.iterator() ;
execInsert(dsg, update.getInsertQuads(), withGraph, it2) ;
Iter.close(it2) ;
}
finally
{
db.close() ;
}
}
// Indirection for subsystems to support USING/USING NAMED.
protected DatasetGraph processUsing(UpdateModify update)
{
if ( update.getUsing().size() == 0 && update.getUsingNamed().size() == 0 )
return null ;
return DynamicDatasets.dynamicDataset(update.getUsing(), update.getUsingNamed(), datasetGraph, false) ;
}
protected DatasetGraph processWith(UpdateModify update)
{
Node withGraph = update.getWithIRI() ;
if ( withGraph == null )
return null ;
Graph g = graphOrDummy(datasetGraph, withGraph) ;
DatasetGraph dsg = new DatasetGraphAltDefaultGraph(datasetGraph, g) ;
return dsg ;
}
private Graph graphOrDummy(DatasetGraph dsg, Node gn)
{
Graph g = graph(datasetGraph, gn) ;
if ( g == null )
g = GraphFactory.createGraphMem() ;
return g ;
}
protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads)
{
NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables() ;
return NodeTransformLib.transformQuads(bnodesToVariables, quads) ;
}
protected Element elementFromQuads(List<Quad> quads)
{
ElementGroup el = new ElementGroup() ;
ElementTriplesBlock x = new ElementTriplesBlock() ;
// Maybe empty??
el.addElement(x) ;
Node g = Quad.defaultGraphNodeGenerated ;
for ( Quad q : quads )
{
if ( q.getGraph() != g )
{
g = q.getGraph() ;
x = new ElementTriplesBlock() ;
if ( g == null || g == Quad.defaultGraphNodeGenerated )
el.addElement(x) ;
else
{
ElementNamedGraph eng = new ElementNamedGraph(g, x) ;
el.addElement(eng) ;
}
}
x.addTriple(q.asTriple()) ;
}
return el ;
}
// JENA-1059
// execDelete ; execInsert
// Quads involving only IRIs and literals do not change from binding to
// binding so any inserts, rather than repeatedly if they are going to be
// done at all. Note bNodes (if legal at this point) change from template
// instantiation to instantiation.
/**
* Split quads into ground terms (no variables) and templated quads.
* @param quads
* @return Pair of (ground quads, templated quads)
*/
private static Pair<List<Quad>, List<Quad>> split(Collection<Quad> quads) {
// Guess size.
// Pre-size in case large (i.e. 10K+).
List<Quad> constQuads = new ArrayList<>(quads.size()) ;
// ... in which case we assume the templated triples are small / non-existent.
List<Quad> templateQuads = new ArrayList<>() ;
quads.forEach((q)-> {
if ( constQuad(q))
constQuads.add(q) ;
else
templateQuads.add(q) ;
}) ;
return Pair.create(constQuads, templateQuads);
}
private static boolean constQuad(Quad quad) {
return constTerm(quad.getGraph()) &&
constTerm(quad.getSubject()) &&
constTerm(quad.getPredicate()) &&
constTerm(quad.getObject()) ;
}
private static boolean constTerm(Node n) {
return n.isURI() || n.isLiteral() ;
}
protected static void execDelete(DatasetGraph dsg, List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) {
Pair<List<Quad>, List<Quad>> p = split(quads) ;
execDelete(dsg, p.getLeft(), p.getRight(), dftGraph, bindings) ;
}
protected static void execDelete(DatasetGraph dsg, List<Quad> onceQuads, List<Quad> templateQuads, Node dftGraph, Iterator<Binding> bindings) {
if ( onceQuads != null && bindings.hasNext() ) {
onceQuads = remapDefaultGraph(onceQuads, dftGraph) ;
onceQuads.forEach(q->deleteFromdatasetGraph(dsg, q)) ;
}
Iterator<Quad> it = template(templateQuads, dftGraph, bindings) ;
if ( it == null )
return ;
it.forEachRemaining(q->deleteFromdatasetGraph(dsg, q)) ;
}
protected static void execInsert(DatasetGraph dsg, List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) {
Pair<List<Quad>, List<Quad>> p = split(quads) ;
execInsert(dsg, p.getLeft(), p.getRight(), dftGraph, bindings) ;
}
protected static void execInsert(DatasetGraph dsg, List<Quad> onceQuads, List<Quad> templateQuads, Node dftGraph, Iterator<Binding> bindings) {
if ( onceQuads != null && bindings.hasNext() ) {
onceQuads = remapDefaultGraph(onceQuads, dftGraph) ;
onceQuads.forEach((q)->addTodatasetGraph(dsg, q)) ;
}
Iterator<Quad> it = template(templateQuads, dftGraph, bindings) ;
if ( it == null )
return ;
it.forEachRemaining((q)->addTodatasetGraph(dsg, q)) ;
}
// Catch all individual adds of quads
private static void addTodatasetGraph(DatasetGraph datasetGraph, Quad quad)
{
// Check legal triple.
if ( quad.isLegalAsData() )
datasetGraph.add(quad);
// Else drop.
//Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ;
}
// Catch all individual deletes of quads
private static void deleteFromdatasetGraph(DatasetGraph datasetGraph, Quad quad)
{
datasetGraph.delete(quad) ;
}
protected Query elementToQuery(Element pattern)
{
if ( pattern == null )
return null ;
Query query = new Query() ;
query.setQueryPattern(pattern) ;
query.setQuerySelectType() ;
query.setQueryResultStar(true) ;
query.setResultVars() ;
return query ;
}
protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph)
{
return evalBindings(elementToQuery(pattern), dftGraph) ;
}
protected Iterator<Binding> evalBindings(Query query, Node dftGraph)
{
DatasetGraph dsg = datasetGraph ;
if ( query != null )
{
if ( dftGraph != null )
{
Graph g = graphOrDummy(dsg, dftGraph) ;
dsg = new DatasetGraphAltDefaultGraph(dsg, g) ;
}
}
return evalBindings(query, dsg, inputBinding, context) ;
}
protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding inputBinding, Context context)
{
// SET UP CONTEXT
// The UpdateProcessorBase already copied the context and made it safe ... but that's going to happen again :-(
Iterator<Binding> toReturn ;
if ( query != null )
{
Plan plan = QueryExecutionFactory.createPlan(query, dsg, inputBinding, context) ;
toReturn = plan.iterator();
}
else
{
toReturn = Iter.singleton((null != inputBinding) ? inputBinding : BindingRoot.create()) ;
}
return toReturn ;
}
protected static Graph graph(DatasetGraph datasetGraph, Node gn)
{
if ( gn == null || gn == Quad.defaultGraphNodeGenerated )
return datasetGraph.getDefaultGraph() ;
else
return datasetGraph.getGraph(gn) ;
}
protected static Graph graph(DatasetGraph datasetGraph, Target target)
{
if ( target.isDefault() )
return datasetGraph.getDefaultGraph() ;
if ( target.isOneNamedGraph() )
return graph(datasetGraph, target.getGraph()) ;
error("Target does not name one graph: "+target) ;
return null ;
}
protected static void error(String msg)
{
throw new UpdateException(msg) ;
}
}
| Tidy and reformat. Add comments.
| jena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java | Tidy and reformat. Add comments. | <ide><path>ena-arq/src/main/java/org/apache/jena/sparql/modify/UpdateEngineWorker.java
<ide> {
<ide> protected final DatasetGraph datasetGraph ;
<ide> protected final boolean alwaysSilent = true ;
<del> protected final Binding inputBinding; // Used for UpdateModify and UpdateDeleteWhere only
<add> protected final Binding inputBinding; // Used for UpdateModify only
<ide> protected final Context context ;
<ide>
<del> public UpdateEngineWorker(DatasetGraph datasetGraph, Binding inputBinding, Context context)
<del> {
<add> public UpdateEngineWorker(DatasetGraph datasetGraph, Binding inputBinding, Context context) {
<ide> this.datasetGraph = datasetGraph ;
<ide> this.inputBinding = inputBinding ;
<ide> this.context = context ;
<ide> public void visit(UpdateClear update)
<ide> { execDropClear(update, true) ; }
<ide>
<del> protected void execDropClear(UpdateDropClear update, boolean isClear)
<del> {
<del> if ( update.isAll() )
<del> {
<del> execDropClear(update, null, true) ; // Always clear.
<del> execDropClearAllNamed(update, isClear) ;
<del> }
<del> else if ( update.isAllNamed() )
<del> execDropClearAllNamed(update, isClear) ;
<add> protected void execDropClear(UpdateDropClear update, boolean isClear) {
<add> if ( update.isAll() ) {
<add> execDropClear(update, null, true); // Always clear.
<add> execDropClearAllNamed(update, isClear);
<add> } else if ( update.isAllNamed() )
<add> execDropClearAllNamed(update, isClear);
<ide> else if ( update.isDefault() )
<del> execDropClear(update, null, true) ;
<add> execDropClear(update, null, true);
<ide> else if ( update.isOneGraph() )
<del> execDropClear(update, update.getGraph(), isClear) ;
<add> execDropClear(update, update.getGraph(), isClear);
<ide> else
<del> throw new ARQInternalErrorException("Target is undefined: "+update.getTarget()) ;
<del> }
<del>
<del> protected void execDropClear(UpdateDropClear update, Node g, boolean isClear)
<del> {
<del> if ( ! alwaysSilent )
<del> {
<del> if ( g != null && ! datasetGraph.containsGraph(g) && ! update.isSilent())
<del> error("No such graph: "+g) ;
<del> }
<del>
<del> if ( isClear )
<del> {
<add> throw new ARQInternalErrorException("Target is undefined: " + update.getTarget());
<add> }
<add>
<add> protected void execDropClear(UpdateDropClear update, Node g, boolean isClear) {
<add> if ( !alwaysSilent ) {
<add> if ( g != null && !datasetGraph.containsGraph(g) && !update.isSilent() )
<add> error("No such graph: " + g);
<add> }
<add>
<add> if ( isClear ) {
<ide> if ( g == null || datasetGraph.containsGraph(g) )
<del> graph(datasetGraph, g).clear() ;
<del> }
<del> else
<del> datasetGraph.removeGraph(g) ;
<del> }
<del>
<del> protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear)
<del> {
<add> graph(datasetGraph, g).clear();
<add> } else
<add> datasetGraph.removeGraph(g);
<add> }
<add>
<add> protected void execDropClearAllNamed(UpdateDropClear update, boolean isClear) {
<ide> // Avoid ConcurrentModificationException
<del> List<Node> list = Iter.toList(datasetGraph.listGraphNodes()) ;
<del>
<add> List<Node> list = Iter.toList(datasetGraph.listGraphNodes());
<add>
<ide> for ( Node gn : list )
<del> execDropClear(update, gn, isClear) ;
<del> }
<del>
<del> @Override
<del> public void visit(UpdateCreate update)
<del> {
<del> Node g = update.getGraph() ;
<add> execDropClear(update, gn, isClear);
<add> }
<add>
<add> @Override
<add> public void visit(UpdateCreate update) {
<add> Node g = update.getGraph();
<ide> if ( g == null )
<del> return ;
<del> if ( datasetGraph.containsGraph(g) )
<del> {
<del> if ( ! alwaysSilent && ! update.isSilent() )
<del> error("Graph store already contains graph : "+g) ;
<del> return ;
<del> }
<del> // In-memory specific
<del> datasetGraph.addGraph(g, GraphFactory.createDefaultGraph()) ;
<del> }
<del>
<del> @Override
<del> public void visit(UpdateLoad update)
<del> {
<del> String source = update.getSource() ;
<del> Node dest = update.getDest() ;
<add> return;
<add> if ( datasetGraph.containsGraph(g) ) {
<add> if ( !alwaysSilent && !update.isSilent() )
<add> error("Graph store already contains graph : " + g);
<add> return;
<add> }
<add> // In-memory specific
<add> datasetGraph.addGraph(g, GraphFactory.createDefaultGraph());
<add> }
<add>
<add> @Override
<add> public void visit(UpdateLoad update) {
<add> // LOAD SILENT? iri ( INTO GraphRef )?
<add> String source = update.getSource();
<add> Node dest = update.getDest();
<ide> try {
<ide> // Read into temporary storage to protect against parse errors.
<del> TypedInputStream s = RDFDataMgr.open(source) ;
<del> Lang lang = RDFDataMgr.determineLang(source, s.getContentType(), null) ;
<del>
<add> TypedInputStream s = RDFDataMgr.open(source);
<add> Lang lang = RDFDataMgr.determineLang(source, s.getContentType(), null);
<add>
<ide> if ( RDFLanguages.isTriples(lang) ) {
<ide> // Triples
<del> Graph g = GraphFactory.createGraphMem() ;
<del> StreamRDF stream = StreamRDFLib.graph(g) ;
<del> RDFDataMgr.parse(stream, s, source) ;
<del> Graph g2 = graph(datasetGraph, dest) ;
<del> GraphUtil.addInto(g2, g) ;
<add> Graph g = GraphFactory.createGraphMem();
<add> StreamRDF stream = StreamRDFLib.graph(g);
<add> RDFDataMgr.parse(stream, s, source);
<add> Graph g2 = graph(datasetGraph, dest);
<add> GraphUtil.addInto(g2, g);
<ide> } else {
<ide> // Quads
<ide> if ( dest != null )
<del> throw new UpdateException("Attempt to load quads into a graph") ;
<del> DatasetGraph dsg = DatasetGraphFactory.create() ;
<del> StreamRDF stream = StreamRDFLib.dataset(dsg) ;
<del> RDFDataMgr.parse(stream, s, source) ;
<del> Iterator<Quad> iter = dsg.find() ;
<del> for ( ; iter.hasNext() ; )
<del> {
<del> Quad q = iter.next() ;
<del> datasetGraph.add(q) ;
<add> throw new UpdateException("Attempt to load quads into a graph");
<add> DatasetGraph dsg = DatasetGraphFactory.create();
<add> StreamRDF stream = StreamRDFLib.dataset(dsg);
<add> RDFDataMgr.parse(stream, s, source);
<add> Iterator<Quad> iter = dsg.find();
<add> for ( ; iter.hasNext() ; ) {
<add> Quad q = iter.next();
<add> datasetGraph.add(q);
<ide> }
<ide> }
<del> } catch (RuntimeException ex)
<del> {
<del> if ( ! update.getSilent() )
<del> {
<add> }
<add> catch (RuntimeException ex) {
<add> if ( !update.getSilent() ) {
<ide> if ( ex instanceof UpdateException )
<del> throw (UpdateException)ex ;
<del> throw new UpdateException("Failed to LOAD '"+source+"'", ex) ;
<del> }
<del> }
<del> }
<del>
<del> @Override
<del> public void visit(UpdateAdd update)
<del> {
<del> if ( ! validBinaryGraphOp(update) ) return ;
<add> throw (UpdateException)ex;
<add> throw new UpdateException("Failed to LOAD '" + source + "'", ex);
<add> }
<add> }
<add> }
<add>
<add> @Override
<add> public void visit(UpdateAdd update) {
<add> // ADD SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<add> if ( !validBinaryGraphOp(update) )
<add> return;
<ide> if ( update.getSrc().equals(update.getDest()) )
<del> return ;
<del> // ADD (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<add> return;
<ide> // Different source and destination.
<del> gsCopyTriples(datasetGraph, update.getSrc(), update.getDest()) ;
<del> }
<del>
<del> @Override
<del> public void visit(UpdateCopy update)
<del> {
<del> if ( ! validBinaryGraphOp(update) ) return ;
<add> gsAddTriples(datasetGraph, update.getSrc(), update.getDest());
<add> }
<add>
<add> @Override
<add> public void visit(UpdateCopy update) {
<add> // COPY SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<add> if ( !validBinaryGraphOp(update) )
<add> return;
<ide> if ( update.getSrc().equals(update.getDest()) )
<del> return ;
<del> // COPY (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<del> gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent()) ;
<del> }
<del>
<del> @Override
<del> public void visit(UpdateMove update)
<del> {
<del> if ( ! validBinaryGraphOp(update) ) return ;
<add> return;
<add> gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent());
<add> }
<add>
<add> @Override
<add> public void visit(UpdateMove update) {
<add> // MOVE SILENT? (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<add> if ( !validBinaryGraphOp(update) )
<add> return;
<ide> if ( update.getSrc().equals(update.getDest()) )
<del> return ;
<add> return;
<ide> // MOVE (DEFAULT or GRAPH) TO (DEFAULT or GRAPH)
<ide> // Difefrent source and destination.
<del> gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent()) ;
<del> gsDrop(datasetGraph, update.getSrc(), true) ;
<del> }
<del>
<del> private boolean validBinaryGraphOp(UpdateBinaryOp update)
<del> {
<add> gsCopy(datasetGraph, update.getSrc(), update.getDest(), update.getSilent());
<add> gsDrop(datasetGraph, update.getSrc(), true);
<add> }
<add>
<add> private boolean validBinaryGraphOp(UpdateBinaryOp update) {
<ide> if ( update.getSrc().isDefault() )
<del> return true ;
<del>
<del> if ( update.getSrc().isOneNamedGraph() )
<del> {
<del> Node gn = update.getSrc().getGraph() ;
<del> if ( ! datasetGraph.containsGraph(gn) )
<del> {
<del> if ( ! update.getSilent() )
<del> error("No such graph: "+gn) ;
<del> return false ;
<del> }
<del> return true ;
<del> }
<del> error("Invalid source target for oepration; "+update.getSrc()) ;
<del> return false ;
<add> return true;
<add>
<add> if ( update.getSrc().isOneNamedGraph() ) {
<add> Node gn = update.getSrc().getGraph();
<add> if ( !datasetGraph.containsGraph(gn) ) {
<add> if ( !update.getSilent() )
<add> error("No such graph: " + gn);
<add> return false;
<add> }
<add> return true;
<add> }
<add> error("Invalid source target for oepration; " + update.getSrc());
<add> return false;
<ide> }
<ide>
<ide> // ----
<ide> // Core operations
<del>
<add> /** Copy from src to dst : copy overwrites (= deletes) the old contents */
<ide> protected static void gsCopy(DatasetGraph dsg, Target src, Target dest, boolean isSilent)
<ide> {
<ide> if ( dest.equals(src) )
<ide> return ;
<ide> gsClear(dsg, dest, true) ;
<del> gsCopyTriples(dsg, src, dest) ;
<del> }
<del>
<del> protected static void gsCopyTriples(DatasetGraph dsg, Target src, Target dest)
<del> {
<del> Graph gSrc = graph(dsg, src) ;
<del> Graph gDest = graph(dsg, dest) ;
<del>
<add> gsAddTriples(dsg, src, dest) ;
<add> }
<add>
<add> /** Add triples from src to dest */
<add> protected static void gsAddTriples(DatasetGraph dsg, Target src, Target dest) {
<add> Graph gSrc = graph(dsg, src);
<add> Graph gDest = graph(dsg, dest);
<add>
<ide> // Avoids concurrency problems by reading fully before writing
<ide> ThresholdPolicy<Triple> policy = ThresholdPolicyFactory.policyFromContext(dsg.getContext());
<del> DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory()) ;
<del> try
<del> {
<del> Iterator<Triple> triples = gSrc.find(null, null, null) ;
<del> db.addAll(triples) ;
<del> Iter.close(triples) ;
<del> GraphOps.addAll(gDest, db.iterator()) ;
<del> }
<del> finally { db.close() ; }
<del> }
<del>
<del> protected static void gsClear(DatasetGraph dsg, Target target, boolean isSilent)
<del> {
<add> DataBag<Triple> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.tripleSerializationFactory());
<add> try {
<add> Iterator<Triple> triples = gSrc.find(null, null, null);
<add> db.addAll(triples);
<add> Iter.close(triples);
<add> GraphOps.addAll(gDest, db.iterator());
<add> }
<add> finally {
<add> db.close();
<add> }
<add> }
<add>
<add> /** Clear target */
<add> protected static void gsClear(DatasetGraph dsg, Target target, boolean isSilent) {
<ide> // No create - we tested earlier.
<del> Graph g = graph(dsg, target) ;
<del> g.clear() ;
<del> }
<del>
<del> protected static void gsDrop(DatasetGraph dsg, Target target, boolean isSilent)
<del> {
<add> Graph g = graph(dsg, target);
<add> g.clear();
<add> }
<add>
<add> /** Remove the target graph */
<add> protected static void gsDrop(DatasetGraph dsg, Target target, boolean isSilent) {
<ide> if ( target.isDefault() )
<del> dsg.getDefaultGraph().clear() ;
<add> dsg.getDefaultGraph().clear();
<ide> else
<del> dsg.removeGraph(target.getGraph()) ;
<add> dsg.removeGraph(target.getGraph());
<ide> }
<ide>
<ide> // ----
<ide>
<ide> @Override
<del> public Sink<Quad> createInsertDataSink()
<del> {
<del> return new Sink<Quad>()
<del> {
<add> public Sink<Quad> createInsertDataSink() {
<add> return new Sink<Quad>() {
<ide> @Override
<del> public void send(Quad quad)
<del> {
<add> public void send(Quad quad) {
<ide> addTodatasetGraph(datasetGraph, quad);
<ide> }
<ide>
<ide> @Override
<del> public void flush()
<del> {
<add> public void flush() {
<ide> SystemARQ.sync(datasetGraph);
<ide> }
<add>
<add> @Override
<add> public void close() {}
<add> };
<add> }
<ide>
<add> @Override
<add> public void visit(UpdateDataInsert update) {
<add> for ( Quad quad : update.getQuads() )
<add> addTodatasetGraph(datasetGraph, quad);
<add> }
<add>
<add> @Override
<add> public Sink<Quad> createDeleteDataSink() {
<add> return new Sink<Quad>() {
<ide> @Override
<del> public void close()
<del> { }
<add> public void send(Quad quad) {
<add> deleteFromDatasetGraph(datasetGraph, quad);
<add> }
<add>
<add> @Override
<add> public void flush() {
<add> SystemARQ.sync(datasetGraph);
<add> }
<add>
<add> @Override
<add> public void close() {}
<ide> };
<ide> }
<del>
<del> @Override
<del> public void visit(UpdateDataInsert update)
<del> {
<add>
<add> @Override
<add> public void visit(UpdateDataDelete update) {
<ide> for ( Quad quad : update.getQuads() )
<del> addTodatasetGraph(datasetGraph, quad) ;
<del> }
<del>
<del> @Override
<del> public Sink<Quad> createDeleteDataSink()
<del> {
<del> return new Sink<Quad>()
<del> {
<del> @Override
<del> public void send(Quad quad)
<del> {
<del> deleteFromdatasetGraph(datasetGraph, quad);
<del> }
<del>
<del> @Override
<del> public void flush()
<del> {
<del> SystemARQ.sync(datasetGraph);
<del> }
<del>
<del> @Override
<del> public void close()
<del> { }
<del> };
<del> }
<del>
<del> @Override
<del> public void visit(UpdateDataDelete update)
<del> {
<del> for ( Quad quad : update.getQuads() )
<del> deleteFromdatasetGraph(datasetGraph, quad) ;
<del> }
<del>
<del> @Override
<del> public void visit(UpdateDeleteWhere update)
<del> {
<add> deleteFromDatasetGraph(datasetGraph, quad);
<add> }
<add>
<add> @Override
<add> public void visit(UpdateDeleteWhere update) {
<ide> List<Quad> quads = update.getQuads() ;
<del> // Convert bNodes to named variables first.
<del>// if ( false )
<del>// // Removed from SPARQL
<del>// quads = convertBNodesToVariables(quads) ;
<add> // Removed from SPARQL : Convert bNodes to named variables first.
<add> //quads = convertBNodesToVariables(quads) ;
<add>
<ide> // Convert quads to a pattern.
<ide> Element el = elementFromQuads(quads) ;
<ide>
<ide> // serialize the quads after applying the template instead.
<ide>
<ide> ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
<del> DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ;
<del> try
<del> {
<del> Iterator<Binding> bindings = evalBindings(el, null) ;
<del> db.addAll(bindings) ;
<del> Iter.close(bindings) ;
<del>
<del> Iterator<Binding> it = db.iterator() ;
<del> execDelete(datasetGraph, quads, null, it) ;
<del> Iter.close(it) ;
<del> }
<del> finally
<del> {
<del> db.close() ;
<add> DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory());
<add> try {
<add> Iterator<Binding> bindings = evalBindings(el, null);
<add> db.addAll(bindings);
<add> Iter.close(bindings);
<add>
<add> Iterator<Binding> it = db.iterator();
<add> execDelete(datasetGraph, quads, null, it);
<add> Iter.close(it);
<add> }
<add> finally {
<add> db.close();
<ide> }
<ide> }
<ide>
<ide> @Override
<del> public void visit(UpdateModify update)
<del> {
<del> Node withGraph = update.getWithIRI() ;
<del> Element elt = update.getWherePattern() ;
<del>
<del> // null or a dataset for USING clause.
<add> public void visit(UpdateModify update) {
<add> Node withGraph = update.getWithIRI();
<add> Element elt = update.getWherePattern();
<add>
<add> // null or a dataset for USING clause.
<ide> // USING/USING NAMED
<del> DatasetGraph dsg = processUsing(update) ;
<del>
<add> DatasetGraph dsg = processUsing(update);
<add>
<ide> // -------------------
<ide> // WITH
<ide> // USING overrides WITH
<ide> if ( dsg == null && withGraph != null ) {
<ide> if ( false ) {
<add> // Subtle difference : WITH <uri>... WHERE {}
<add> // and an empty/unknown graph <uri>
<add> // rewrite with GRAPH -> no match.
<add> // redo as dataset with different default graph -> match
<add> // SPARQL is unclear abotu what happens when the graph does not exist.
<add> // The rewite means the raw query engine is used though.
<add>
<ide> // Ye Olde way - create a special dataset
<ide> dsg = processWith(update) ;
<ide> withGraph = null ;
<ide> Query query = elementToQuery(elt) ;
<ide> ThresholdPolicy<Binding> policy = ThresholdPolicyFactory.policyFromContext(datasetGraph.getContext());
<ide> DataBag<Binding> db = BagFactory.newDefaultBag(policy, SerializationFactoryFinder.bindingSerializationFactory()) ;
<del> try
<del> {
<del> Iterator<Binding> bindings = evalBindings(query, dsg, inputBinding, context) ;
<del>
<del> if ( false )
<del> {
<del> List<Binding> x = Iter.toList(bindings) ;
<del> System.out.printf("====>> Bindings (%d)\n", x.size()) ;
<del> Iter.print(System.out, x.iterator()) ;
<del> System.out.println("====<<") ;
<del> bindings = Iter.iter(x) ;
<del> }
<del> db.addAll(bindings) ;
<del> Iter.close(bindings) ;
<del>
<del> Iterator<Binding> it = db.iterator() ;
<del> execDelete(dsg, update.getDeleteQuads(), withGraph, it) ;
<del> Iter.close(it) ;
<del>
<del> Iterator<Binding> it2 = db.iterator() ;
<del> execInsert(dsg, update.getInsertQuads(), withGraph, it2) ;
<del> Iter.close(it2) ;
<del> }
<del> finally
<del> {
<del> db.close() ;
<add> try {
<add> Iterator<Binding> bindings = evalBindings(query, dsg, inputBinding, context);
<add>
<add> if ( false ) {
<add> List<Binding> x = Iter.toList(bindings);
<add> System.out.printf("====>> Bindings (%d)\n", x.size());
<add> Iter.print(System.out, x.iterator());
<add> System.out.println("====<<");
<add> bindings = Iter.iter(x);
<add> }
<add> db.addAll(bindings);
<add> Iter.close(bindings);
<add>
<add> Iterator<Binding> it = db.iterator();
<add> execDelete(dsg, update.getDeleteQuads(), withGraph, it);
<add> Iter.close(it);
<add>
<add> Iterator<Binding> it2 = db.iterator();
<add> execInsert(dsg, update.getInsertQuads(), withGraph, it2);
<add> Iter.close(it2);
<add> }
<add> finally {
<add> db.close();
<ide> }
<ide> }
<ide>
<ide> // Indirection for subsystems to support USING/USING NAMED.
<del> protected DatasetGraph processUsing(UpdateModify update)
<del> {
<add> protected DatasetGraph processUsing(UpdateModify update) {
<ide> if ( update.getUsing().size() == 0 && update.getUsingNamed().size() == 0 )
<del> return null ;
<del>
<del> return DynamicDatasets.dynamicDataset(update.getUsing(), update.getUsingNamed(), datasetGraph, false) ;
<del> }
<del>
<del> protected DatasetGraph processWith(UpdateModify update)
<del> {
<del> Node withGraph = update.getWithIRI() ;
<add> return null;
<add>
<add> return DynamicDatasets.dynamicDataset(update.getUsing(), update.getUsingNamed(), datasetGraph, false);
<add> }
<add>
<add> protected DatasetGraph processWith(UpdateModify update) {
<add> Node withGraph = update.getWithIRI();
<ide> if ( withGraph == null )
<del> return null ;
<del> Graph g = graphOrDummy(datasetGraph, withGraph) ;
<del> DatasetGraph dsg = new DatasetGraphAltDefaultGraph(datasetGraph, g) ;
<del> return dsg ;
<del> }
<del>
<del> private Graph graphOrDummy(DatasetGraph dsg, Node gn)
<del> {
<del> Graph g = graph(datasetGraph, gn) ;
<add> return null;
<add> Graph g = graphOrDummy(datasetGraph, withGraph);
<add> DatasetGraph dsg = new DatasetGraphAltDefaultGraph(datasetGraph, g);
<add> return dsg;
<add> }
<add>
<add> private Graph graphOrDummy(DatasetGraph dsg, Node gn) {
<add> Graph g = graph(datasetGraph, gn);
<ide> if ( g == null )
<del> g = GraphFactory.createGraphMem() ;
<del> return g ;
<del> }
<del>
<del> protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads)
<del> {
<del> NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables() ;
<del> return NodeTransformLib.transformQuads(bnodesToVariables, quads) ;
<del> }
<del>
<del> protected Element elementFromQuads(List<Quad> quads)
<del> {
<del> ElementGroup el = new ElementGroup() ;
<del> ElementTriplesBlock x = new ElementTriplesBlock() ;
<add> g = GraphFactory.createGraphMem();
<add> return g;
<add> }
<add>
<add> protected static List<Quad> unused_convertBNodesToVariables(List<Quad> quads) {
<add> NodeTransform bnodesToVariables = new NodeTransformBNodesToVariables();
<add> return NodeTransformLib.transformQuads(bnodesToVariables, quads);
<add> }
<add>
<add> protected Element elementFromQuads(List<Quad> quads) {
<add> ElementGroup el = new ElementGroup();
<add> ElementTriplesBlock x = new ElementTriplesBlock();
<ide> // Maybe empty??
<del> el.addElement(x) ;
<del> Node g = Quad.defaultGraphNodeGenerated ;
<del>
<del> for ( Quad q : quads )
<del> {
<del> if ( q.getGraph() != g )
<del> {
<del> g = q.getGraph() ;
<del> x = new ElementTriplesBlock() ;
<add> el.addElement(x);
<add> Node g = Quad.defaultGraphNodeGenerated;
<add>
<add> for ( Quad q : quads ) {
<add> if ( q.getGraph() != g ) {
<add> g = q.getGraph();
<add> x = new ElementTriplesBlock();
<ide> if ( g == null || g == Quad.defaultGraphNodeGenerated )
<del> el.addElement(x) ;
<del> else
<del> {
<del> ElementNamedGraph eng = new ElementNamedGraph(g, x) ;
<del> el.addElement(eng) ;
<add> el.addElement(x);
<add> else {
<add> ElementNamedGraph eng = new ElementNamedGraph(g, x);
<add> el.addElement(eng);
<ide> }
<ide> }
<del> x.addTriple(q.asTriple()) ;
<del> }
<del> return el ;
<del> }
<del>
<del> // JENA-1059
<add> x.addTriple(q.asTriple());
<add> }
<add> return el;
<add> }
<add>
<add> // JENA-1059 : optimization : process templates for ground triples and do these once.
<ide> // execDelete ; execInsert
<ide> // Quads involving only IRIs and literals do not change from binding to
<ide> // binding so any inserts, rather than repeatedly if they are going to be
<ide> }
<ide>
<ide> private static boolean constQuad(Quad quad) {
<del> return constTerm(quad.getGraph()) &&
<del> constTerm(quad.getSubject()) &&
<del> constTerm(quad.getPredicate()) &&
<del> constTerm(quad.getObject()) ;
<add> return constTerm(quad.getGraph()) && constTerm(quad.getSubject()) &&
<add> constTerm(quad.getPredicate()) && constTerm(quad.getObject());
<ide> }
<ide>
<ide> private static boolean constTerm(Node n) {
<ide> protected static void execDelete(DatasetGraph dsg, List<Quad> onceQuads, List<Quad> templateQuads, Node dftGraph, Iterator<Binding> bindings) {
<ide> if ( onceQuads != null && bindings.hasNext() ) {
<ide> onceQuads = remapDefaultGraph(onceQuads, dftGraph) ;
<del> onceQuads.forEach(q->deleteFromdatasetGraph(dsg, q)) ;
<add> onceQuads.forEach(q->deleteFromDatasetGraph(dsg, q)) ;
<ide> }
<ide> Iterator<Quad> it = template(templateQuads, dftGraph, bindings) ;
<ide> if ( it == null )
<ide> return ;
<del> it.forEachRemaining(q->deleteFromdatasetGraph(dsg, q)) ;
<add> it.forEachRemaining(q->deleteFromDatasetGraph(dsg, q)) ;
<ide> }
<ide>
<ide> protected static void execInsert(DatasetGraph dsg, List<Quad> quads, Node dftGraph, Iterator<Binding> bindings) {
<ide> return ;
<ide> it.forEachRemaining((q)->addTodatasetGraph(dsg, q)) ;
<ide> }
<del>
<del> // Catch all individual adds of quads
<del> private static void addTodatasetGraph(DatasetGraph datasetGraph, Quad quad)
<del> {
<add>
<add> // Catch all individual adds of quads
<add> private static void addTodatasetGraph(DatasetGraph datasetGraph, Quad quad) {
<ide> // Check legal triple.
<ide> if ( quad.isLegalAsData() )
<ide> datasetGraph.add(quad);
<ide> // Else drop.
<del> //Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ;
<del> }
<del>
<del> // Catch all individual deletes of quads
<del> private static void deleteFromdatasetGraph(DatasetGraph datasetGraph, Quad quad)
<del> {
<del> datasetGraph.delete(quad) ;
<del> }
<del>
<del> protected Query elementToQuery(Element pattern)
<del> {
<add> // Log.warn(UpdateEngineWorker.class, "Bad quad as data: "+quad) ;
<add> }
<add>
<add> // Catch all individual deletes of quads
<add> private static void deleteFromDatasetGraph(DatasetGraph datasetGraph, Quad quad) {
<add> datasetGraph.delete(quad);
<add> }
<add>
<add> protected Query elementToQuery(Element pattern) {
<ide> if ( pattern == null )
<del> return null ;
<del> Query query = new Query() ;
<del> query.setQueryPattern(pattern) ;
<del> query.setQuerySelectType() ;
<del> query.setQueryResultStar(true) ;
<del> query.setResultVars() ;
<del> return query ;
<del> }
<del>
<del> protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph)
<del> {
<del> return evalBindings(elementToQuery(pattern), dftGraph) ;
<del> }
<del>
<del> protected Iterator<Binding> evalBindings(Query query, Node dftGraph)
<del> {
<del> DatasetGraph dsg = datasetGraph ;
<del> if ( query != null )
<del> {
<del> if ( dftGraph != null )
<del> {
<del> Graph g = graphOrDummy(dsg, dftGraph) ;
<del> dsg = new DatasetGraphAltDefaultGraph(dsg, g) ;
<del> }
<del> }
<del>
<del> return evalBindings(query, dsg, inputBinding, context) ;
<del>
<del> }
<del>
<del> protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding inputBinding, Context context)
<del> {
<del> // SET UP CONTEXT
<del> // The UpdateProcessorBase already copied the context and made it safe ... but that's going to happen again :-(
<del>
<del> Iterator<Binding> toReturn ;
<del>
<del> if ( query != null )
<del> {
<del> Plan plan = QueryExecutionFactory.createPlan(query, dsg, inputBinding, context) ;
<add> return null;
<add> Query query = new Query();
<add> query.setQueryPattern(pattern);
<add> query.setQuerySelectType();
<add> query.setQueryResultStar(true);
<add> query.setResultVars();
<add> return query;
<add> }
<add>
<add> protected Iterator<Binding> evalBindings(Element pattern, Node dftGraph) {
<add> return evalBindings(elementToQuery(pattern), dftGraph);
<add> }
<add>
<add> protected Iterator<Binding> evalBindings(Query query, Node dftGraph) {
<add> DatasetGraph dsg = datasetGraph;
<add> if ( query != null ) {
<add> if ( dftGraph != null ) {
<add> Graph g = graphOrDummy(dsg, dftGraph);
<add> dsg = new DatasetGraphAltDefaultGraph(dsg, g);
<add> }
<add> }
<add> return evalBindings(query, dsg, inputBinding, context);
<add> }
<add>
<add> protected static Iterator<Binding> evalBindings(Query query, DatasetGraph dsg, Binding inputBinding, Context context) {
<add> // The UpdateProcessorBase already copied the context and made it safe
<add> // ... but that's going to happen again :-(
<add>
<add> Iterator<Binding> toReturn;
<add>
<add> if ( query != null ) {
<add> Plan plan = QueryExecutionFactory.createPlan(query, dsg, inputBinding, context);
<ide> toReturn = plan.iterator();
<del> }
<add> } else {
<add> toReturn = Iter.singleton((null != inputBinding) ? inputBinding : BindingRoot.create());
<add> }
<add> return toReturn;
<add> }
<add>
<add> protected static Graph graph(DatasetGraph datasetGraph, Node gn) {
<add> if ( gn == null || gn == Quad.defaultGraphNodeGenerated )
<add> return datasetGraph.getDefaultGraph();
<ide> else
<del> {
<del> toReturn = Iter.singleton((null != inputBinding) ? inputBinding : BindingRoot.create()) ;
<del> }
<del> return toReturn ;
<del> }
<del>
<del> protected static Graph graph(DatasetGraph datasetGraph, Node gn)
<del> {
<del> if ( gn == null || gn == Quad.defaultGraphNodeGenerated )
<del> return datasetGraph.getDefaultGraph() ;
<del> else
<del> return datasetGraph.getGraph(gn) ;
<del> }
<del>
<del> protected static Graph graph(DatasetGraph datasetGraph, Target target)
<del> {
<add> return datasetGraph.getGraph(gn);
<add> }
<add>
<add> protected static Graph graph(DatasetGraph datasetGraph, Target target) {
<ide> if ( target.isDefault() )
<del> return datasetGraph.getDefaultGraph() ;
<add> return datasetGraph.getDefaultGraph();
<ide> if ( target.isOneNamedGraph() )
<del> return graph(datasetGraph, target.getGraph()) ;
<del> error("Target does not name one graph: "+target) ;
<del> return null ;
<del> }
<del>
<del> protected static void error(String msg)
<del> {
<del> throw new UpdateException(msg) ;
<add> return graph(datasetGraph, target.getGraph());
<add> error("Target does not name one graph: " + target);
<add> return null;
<add> }
<add>
<add> protected static void error(String msg) {
<add> throw new UpdateException(msg);
<ide> }
<ide> } |
|
Java | mit | 04104cfb81f45b388aa57bfa3d875e7efc6fec6d | 0 | MarkEWaite/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,jenkinsci/git-plugin,martinda/git-plugin,jenkinsci/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,v1v/git-plugin,martinda/git-plugin,martinda/git-plugin,v1v/git-plugin,v1v/git-plugin,MarkEWaite/git-plugin | package hudson.plugins.git.util;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.DescriptorExtensionList;
import hudson.ExtensionPoint;
import hudson.model.Describable;
import jenkins.model.Jenkins;
import hudson.model.Item;
import hudson.model.TaskListener;
import hudson.plugins.git.GitException;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.IGitAPI;
import hudson.plugins.git.Revision;
import org.jenkinsci.plugins.gitclient.GitClient;
import javax.annotation.ParametersAreNonnullByDefault;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Interface defining an API to choose which revisions ought to be
* considered for building.
*
* <p>
* This object is persisted as a part of the project configuration.
*
* @author magnayn
* @author Kohsuke Kawaguchi
*/
public abstract class BuildChooser implements ExtensionPoint, Describable<BuildChooser>, Serializable {
/**
* Refers back to the {@link GitSCM} that owns this build chooser.
* Do not modify from outside {@link GitSCM}.
*/
public transient GitSCM gitSCM;
/**
* Short-hand to get to the display name.
* @return display name of this build chooser
*/
public final String getDisplayName() {
return getDescriptor().getDisplayName();
}
/**
* Get a list of revisions that are candidates to be built.
*
* <p>
* This method is invoked on the node where the workspace exists, which may not be the master.
*
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git
* Used for invoking Git
* @param listener
* build log
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return the candidate revision. Can be an empty set to indicate that there's nothing to build.
*
* @throws IOException on input or output error
* @throws GitException on git error
* @throws InterruptedException when interrupted
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, @CheckForNull String singleBranch,
@NonNull GitClient git, @NonNull TaskListener listener,
@NonNull BuildData buildData, @NonNull BuildChooserContext context)
throws GitException, IOException, InterruptedException {
// fallback to the previous signature
return getCandidateRevisions(isPollCall, singleBranch, (IGitAPI) git, listener, buildData, context);
}
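    // A minimal sketch of a custom chooser overriding the method above: resolve the configured
    // branch to a single revision and return it. The class name, ref handling and extra imports
    // (ObjectId, Branch, Collections) are assumptions for illustration, not part of this API;
    // a real implementation would also declare a BuildChooserDescriptor.
    //
    //   public class SingleRevisionBuildChooser extends BuildChooser {
    //       @Override
    //       public Collection<Revision> getCandidateRevisions(boolean isPollCall, String singleBranch,
    //               GitClient git, TaskListener listener, BuildData buildData, BuildChooserContext context)
    //               throws GitException, IOException, InterruptedException {
    //           String ref = (singleBranch != null) ? singleBranch : "HEAD";
    //           ObjectId sha1 = git.revParse(ref);
    //           Revision revision = new Revision(sha1);
    //           revision.getBranches().add(new Branch(ref, sha1));
    //           return Collections.singletonList(revision);
    //       }
    //   }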
/**
* @deprecated as of 1.2.0
* Use and override {@link #getCandidateRevisions(boolean, String, org.jenkinsci.plugins.gitclient.GitClient, hudson.model.TaskListener, BuildData, BuildChooserContext)}
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git
* Used for invoking Git
* @param listener
* build log
* @param buildData
* Information that captures what we did during the last build.
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
* @throws IOException on input or output error
* @throws GitException on git error
* @throws InterruptedException when interrupted
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, String singleBranch,
IGitAPI git, TaskListener listener, BuildData buildData, BuildChooserContext context) throws GitException, IOException, InterruptedException {
// fallback to the previous signature
return getCandidateRevisions(isPollCall,singleBranch,git,listener,buildData);
}
/**
* @deprecated as of 1.1.17
* Use and override {@link #getCandidateRevisions(boolean, String, IGitAPI, TaskListener, BuildData, BuildChooserContext)}
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git GitClient used to access repository
* @param listener build log
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
* @throws IOException on input or output error
* @throws GitException on git error
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, String singleBranch,
IGitAPI git, TaskListener listener, BuildData buildData) throws GitException, IOException {
throw new UnsupportedOperationException("getCandidateRevisions method must be overridden");
}
/**
* @deprecated as of 1.1.25
* Use and override {@link #prevBuildForChangelog(String, BuildData, IGitAPI, BuildChooserContext)}
* @param branch contains the name of branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @return
     * the last build of this branch recorded in the build data (the changelog baseline), or null if there is none.
     */
public Build prevBuildForChangelog(String branch, @Nullable BuildData buildData, IGitAPI git) {
return buildData == null ? null : buildData.getLastBuildOfBranch(branch);
}
/**
* Determines the baseline to compute the changelog against.
*
* <p>
     * {@link #getCandidateRevisions(boolean, String, IGitAPI, TaskListener, BuildData, BuildChooserContext)} determines
     * which commits can be the subject of a build, and for each commit it determines the branches that contribute to it.
*
* <p>
     * Once {@link GitSCM} picks up a specific {@link Revision} to build, this method is called
     * {@linkplain Revision#getBranches() for each branch} in that revision to compute the changelog.
*
* @param branch
* The branch name.
* @param data
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
* @throws IOException on input or output error
* @throws InterruptedException when interrupted
     * @return the build to use as the changelog baseline for the branch, or null if there is none.
*/
public Build prevBuildForChangelog(String branch, @Nullable BuildData data, GitClient git, BuildChooserContext context) throws IOException,InterruptedException {
return prevBuildForChangelog(branch,data, (IGitAPI) git, context);
}
/**
* @deprecated as of 1.2.0
* Use and override {@link #prevBuildForChangelog(String, BuildData, org.jenkinsci.plugins.gitclient.GitClient, BuildChooserContext)}
* @param branch contains the name of a branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param data
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return
     * the build to use as the changelog baseline for the branch, or null if there is none.
* @throws IOException on I/O error
* @throws GitException on git error
* @throws InterruptedException if interrupted
*/
public Build prevBuildForChangelog(String branch, @Nullable BuildData data, IGitAPI git, BuildChooserContext context) throws IOException,InterruptedException {
return prevBuildForChangelog(branch,data,git);
}
/**
* Returns build chooser descriptor.
* @return build chooser descriptor
*/
@SuppressFBWarnings(value="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification="Jenkins.getInstance() is not null")
public BuildChooserDescriptor getDescriptor() {
return (BuildChooserDescriptor)Jenkins.getInstance().getDescriptorOrDie(getClass());
}
/**
* All the registered build choosers.
* @return all registered build choosers
*/
@SuppressFBWarnings(value="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification="Jenkins.getInstance() is not null")
public static DescriptorExtensionList<BuildChooser,BuildChooserDescriptor> all() {
return Jenkins.getInstance()
.<BuildChooser,BuildChooserDescriptor>getDescriptorList(BuildChooser.class);
}
/**
* All the registered build choosers that are applicable to the specified item.
*
* @param item the item.
* @return All build choosers applicable to item
*/
public static List<BuildChooserDescriptor> allApplicableTo(Item item) {
List<BuildChooserDescriptor> result = new ArrayList<>();
for (BuildChooserDescriptor d: all()) {
if (d.isApplicable(item.getClass()))
result.add(d);
}
return result;
}
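    /*
     * Usage sketch (the variable "job" is assumed to be some Item in scope): list the display
     * names of the choosers that apply to it, e.g. when building a configuration drop-down.
     *
     *   for (BuildChooserDescriptor d : BuildChooser.allApplicableTo(job)) {
     *       System.out.println(d.getDisplayName());
     *   }
     */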
private static final long serialVersionUID = 1L;
/**
* In a general case, a working tree is a left-over from the previous build, so it can be quite
     * messed up (such as HEAD pointing to a random branch). This method is responsible for bringing the
* working copy to a predictable clean state where candidate revisions can be evaluated.
* <p>
     * A typical use-case is a BuildChooser that handles pull-request merges for validation. Such a
     * BuildChooser will run the merge on the working copy, and expose the merge commit as a candidate from
     * {@link BuildChooser#getCandidateRevisions(boolean, String, org.jenkinsci.plugins.gitclient.GitClient, hudson.model.TaskListener, BuildData, BuildChooserContext)}
*
* @param git client to execute git commands on working tree
* @param listener build log
* @param context back-channel to master so implementation can interact with Jenkins model
* @throws IOException on input or output error
* @throws InterruptedException when interrupted
*/
@ParametersAreNonnullByDefault
public void prepareWorkingTree(GitClient git, TaskListener listener, BuildChooserContext context) throws IOException,InterruptedException {
// Nop
}
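    /*
     * Override sketch (hypothetical subclass): a chooser that merges pull requests for validation
     * would reset and prepare the tree here before candidate revisions are evaluated.
     *
     *   @Override
     *   public void prepareWorkingTree(GitClient git, TaskListener listener, BuildChooserContext context)
     *           throws IOException, InterruptedException {
     *       listener.getLogger().println("Preparing working tree before choosing revisions");
     *       // e.g. check out a known base branch and clean the tree via the GitClient API
     *   }
     */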
}
| src/main/java/hudson/plugins/git/util/BuildChooser.java | package hudson.plugins.git.util;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import hudson.DescriptorExtensionList;
import hudson.ExtensionPoint;
import hudson.model.Describable;
import jenkins.model.Jenkins;
import hudson.model.Item;
import hudson.model.TaskListener;
import hudson.plugins.git.GitException;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.IGitAPI;
import hudson.plugins.git.Revision;
import org.jenkinsci.plugins.gitclient.GitClient;
import javax.annotation.ParametersAreNonnullByDefault;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
* Interface defining an API to choose which revisions ought to be
* considered for building.
*
* <p>
* This object is persisted as a part of the project configuration.
*
* @author magnayn
* @author Kohsuke Kawaguchi
*/
public abstract class BuildChooser implements ExtensionPoint, Describable<BuildChooser>, Serializable {
/**
* Refers back to the {@link GitSCM} that owns this build chooser.
* Do not modify from outside {@link GitSCM}.
*/
public transient GitSCM gitSCM;
/**
* Short-hand to get to the display name.
* @return display name of this build chooser
*/
public final String getDisplayName() {
return getDescriptor().getDisplayName();
}
/**
* Get a list of revisions that are candidates to be built.
*
* <p>
* This method is invoked on the node where the workspace exists, which may not be the master.
*
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git
* Used for invoking Git
* @param listener
* build log
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
*
* @throws IOException on input or output error
* @throws GitException on git error
* @throws InterruptedException when interrupted
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, @CheckForNull String singleBranch,
@NonNull GitClient git, @NonNull TaskListener listener,
@NonNull BuildData buildData, @NonNull BuildChooserContext context)
throws GitException, IOException, InterruptedException {
// fallback to the previous signature
return getCandidateRevisions(isPollCall, singleBranch, (IGitAPI) git, listener, buildData, context);
}
/**
* @deprecated as of 1.2.0
* Use and override {@link #getCandidateRevisions(boolean, String, org.jenkinsci.plugins.gitclient.GitClient, hudson.model.TaskListener, BuildData, BuildChooserContext)}
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git
* Used for invoking Git
* @param listener
* build log
* @param buildData
* Information that captures what we did during the last build.
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
* @throws IOException on input or output error
* @throws GitException on git error
* @throws InterruptedException when interrupted
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, String singleBranch,
IGitAPI git, TaskListener listener, BuildData buildData, BuildChooserContext context) throws GitException, IOException, InterruptedException {
// fallback to the previous signature
return getCandidateRevisions(isPollCall,singleBranch,git,listener,buildData);
}
/**
* @deprecated as of 1.1.17
* Use and override {@link #getCandidateRevisions(boolean, String, IGitAPI, TaskListener, BuildData, BuildChooserContext)}
* @param isPollCall true if this method is called from pollChanges.
* @param singleBranch contains the name of a single branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param git GitClient used to access repository
* @param listener build log
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
* @throws IOException on input or output error
* @throws GitException on git error
*/
public Collection<Revision> getCandidateRevisions(boolean isPollCall, String singleBranch,
IGitAPI git, TaskListener listener, BuildData buildData) throws GitException, IOException {
throw new UnsupportedOperationException("getCandidateRevisions method must be overridden");
}
/**
* @deprecated as of 1.1.25
* Use and override {@link #prevBuildForChangelog(String, BuildData, IGitAPI, BuildChooserContext)}
* @param branch contains the name of branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param buildData build data to be used
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @return
* the candidate revision. Can be an empty set to indicate that there's nothi */
public Build prevBuildForChangelog(String branch, @Nullable BuildData buildData, IGitAPI git) {
return buildData == null ? null : buildData.getLastBuildOfBranch(branch);
}
/**
* Determines the baseline to compute the changelog against.
*
* <p>
* {@link #getCandidateRevisions(boolean, String, IGitAPI, TaskListener, BuildData, BuildChooserContext)} determine
* what commits can be subject for a build, and for each commit it determines the branches that contribute to them.
*
* <p>
* Once {@link GitSCM} picks up a specific {@link Revision} to build, {@linkplain Revision#getBranches() for each branch},
* in that revision, this method is called to compute the changelog.
*
* @param branch
* The branch name.
* @param data
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
* @return preceding build
* @throws IOException on input or output error
* @throws InterruptedException when interrupted
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
*/
public Build prevBuildForChangelog(String branch, @Nullable BuildData data, GitClient git, BuildChooserContext context) throws IOException,InterruptedException {
return prevBuildForChangelog(branch,data, (IGitAPI) git, context);
}
/**
* @deprecated as of 1.2.0
* Use and override {@link #prevBuildForChangelog(String, BuildData, org.jenkinsci.plugins.gitclient.GitClient, BuildChooserContext)}
* @param branch contains the name of a branch to be built
* this will be non-null only in the simple case, in advanced
* cases with multiple repositories and/or branches specified
* then this value will be null.
* @param data
* Information that captures what we did during the last build.
* @param git
* Used for invoking Git
* @param context
* Object that provides access back to the model object. This is because
* the build chooser can be invoked on a slave where there's no direct access
* to the build/project for which this is invoked.
*
* If {@code isPollCall} is false, then call back to both project and build are available.
* If {@code isPollCall} is true, then only the callback to the project is available as there's
* no contextual build object.
* @return
* the candidate revision. Can be an empty set to indicate that there's nothing to build.
* @throws IOException on I/O error
* @throws GitException on git error
* @throws InterruptedException if interrupted
*/
public Build prevBuildForChangelog(String branch, @Nullable BuildData data, IGitAPI git, BuildChooserContext context) throws IOException,InterruptedException {
return prevBuildForChangelog(branch,data,git);
}
/**
* Returns build chooser descriptor.
* @return build chooser descriptor
*/
@SuppressFBWarnings(value="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification="Jenkins.getInstance() is not null")
public BuildChooserDescriptor getDescriptor() {
return (BuildChooserDescriptor)Jenkins.getInstance().getDescriptorOrDie(getClass());
}
/**
* All the registered build choosers.
* @return all registered build choosers
*/
@SuppressFBWarnings(value="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification="Jenkins.getInstance() is not null")
public static DescriptorExtensionList<BuildChooser,BuildChooserDescriptor> all() {
return Jenkins.getInstance()
.<BuildChooser,BuildChooserDescriptor>getDescriptorList(BuildChooser.class);
}
/**
* All the registered build choosers that are applicable to the specified item.
*
* @param item the item.
* @return All build choosers applicable to item
*/
public static List<BuildChooserDescriptor> allApplicableTo(Item item) {
List<BuildChooserDescriptor> result = new ArrayList<>();
for (BuildChooserDescriptor d: all()) {
if (d.isApplicable(item.getClass()))
result.add(d);
}
return result;
}
private static final long serialVersionUID = 1L;
/**
* In a general case, a working tree is a left-over from the previous build, so it can be quite
* messed up (such as HEAD pointing to a random branch). This method is responsible to bring the
* working copy to a predictable clean state where candidate revisions can be evaluated.
* <p>
* Typical use-case is a BuildChooser which do handle pull-request merge for validation. Such a
* BuildChooser will run the merge on working copy, and expose the merge commit as
* {@link BuildChooser#getCandidateRevisions(boolean, String, org.jenkinsci.plugins.gitclient.GitClient, hudson.model.TaskListener, BuildData, BuildChooserContext)}
*
* @param git client to execute git commands on working tree
* @param listener build log
* @param context back-channel to master so implementation can interact with Jenkins model
* @throws IOException on input or output error
* @throws InterruptedException when interrupted
*/
@ParametersAreNonnullByDefault
public void prepareWorkingTree(GitClient git, TaskListener listener, BuildChooserContext context) throws IOException,InterruptedException {
// Nop
}
}
| Fix javadoc for Java 11
| src/main/java/hudson/plugins/git/util/BuildChooser.java | Fix javadoc for Java 11 | <ide><path>rc/main/java/hudson/plugins/git/util/BuildChooser.java
<ide> * If {@code isPollCall} is false, then call back to both project and build are available.
<ide> * If {@code isPollCall} is true, then only the callback to the project is available as there's
<ide> * no contextual build object.
<del> * @return
<del> * the candidate revision. Can be an empty set to indicate that there's nothing to build.
<add> * @return the candidate revision. Can be an empty set to indicate that there's nothing to build.
<ide> *
<ide> * @throws IOException on input or output error
<ide> * @throws GitException on git error
<ide> * Object that provides access back to the model object. This is because
<ide> * the build chooser can be invoked on a slave where there's no direct access
<ide> * to the build/project for which this is invoked.
<del> * @return preceding build
<del> * @throws IOException on input or output error
<del> * @throws InterruptedException when interrupted
<del> * @return
<del> * the candidate revision. Can be an empty set to indicate that there's nothing to build.
<add> * @throws IOException on input or output error
<add> * @throws InterruptedException when interrupted
<add> * @return the candidate revision. Can be an empty set to indicate that there's nothing to build.
<ide> */
<ide> public Build prevBuildForChangelog(String branch, @Nullable BuildData data, GitClient git, BuildChooserContext context) throws IOException,InterruptedException {
<ide> return prevBuildForChangelog(branch,data, (IGitAPI) git, context); |
|
Java | agpl-3.0 | bfa649d65d45a5ff7fbf7bc747d5e8a21786fb9b | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 5089c09e-2e61-11e5-9284-b827eb9e62be | hello.java | 50845096-2e61-11e5-9284-b827eb9e62be | 5089c09e-2e61-11e5-9284-b827eb9e62be | hello.java | 5089c09e-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>50845096-2e61-11e5-9284-b827eb9e62be
<add>5089c09e-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 0e0bd07ccd3b647f0e2325b4de10fa781769808b | 0 | xaoseric/PermissionsEx,xaoseric/PermissionsEx,Phoenix616/PermissionsEx,Phoenix616/PermissionsEx,PEXPlugins/PermissionsEx,PEXPlugins/PermissionsEx,PEXPlugins/PermissionsEx | /*
* PermissionsEx - Permissions plugin for Bukkit
* Copyright (C) 2011 t3hk0d3 http://www.tehkode.ru
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ru.tehkode.permissions.bukkit;
import java.io.File;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.*;
import org.bukkit.event.Event.Priority;
import org.bukkit.event.block.*;
import org.bukkit.event.entity.*;
import org.bukkit.event.player.*;
import org.bukkit.event.vehicle.*;
import org.bukkit.plugin.*;
import org.bukkit.plugin.java.JavaPlugin;
import ru.tehkode.permissions.*;
import ru.tehkode.permissions.backends.*;
import ru.tehkode.permissions.commands.CommandsManager;
import ru.tehkode.permissions.config.Configuration;
/**
*
* @author code
*/
public class PermissionsPlugin extends JavaPlugin {
protected static final String configFile = "config.yml";
protected static final Logger logger = Logger.getLogger("Minecraft");
protected PermissionManager permissionsManager;
protected CommandsManager commandsManager;
public PermissionsPlugin() {
super();
PermissionBackend.registerBackendAlias("sql", SQLBackend.class);
PermissionBackend.registerBackendAlias("file", FileBackend.class);
logger.log(Level.INFO, "[PermissionsEx] PermissionEx plugin was Initialized.");
}
@Override
public void onLoad() {
this.commandsManager = new CommandsManager(this);
this.permissionsManager = new PermissionManager(this.loadConfig(configFile));
}
@Override
public void onEnable() {
this.commandsManager.register(new ru.tehkode.permissions.bukkit.commands.PermissionsCommand());
this.registerEvents();
logger.log(Level.INFO, "[PermissionsEx] version [" + this.getDescription().getVersion() + "] (" + this.getDescription().getVersion() + ") loaded");
}
@Override
public void onDisable() {
logger.log(Level.INFO, "[PermissionsEx-" + this.getDescription().getVersion() + "] disabled successfully.");
}
@Override
public boolean onCommand(CommandSender sender, Command command, String commandLabel, String[] args) {
PluginDescriptionFile pdfFile = this.getDescription();
if (args.length > 0) {
return this.commandsManager.execute(sender, command, args);
} else {
if (sender instanceof Player) {
sender.sendMessage(ChatColor.WHITE + "[PermissionsEx]: Running (" + pdfFile.getVersion() + ")");
return !this.permissionsManager.has((Player) sender, "permissions.manage");
} else {
sender.sendMessage("[" + pdfFile.getName() + "] version [" + pdfFile.getVersion() + "] loaded");
return false;
}
}
}
public static PermissionManager getPermissionManager() {
Plugin plugin = Bukkit.getServer().getPluginManager().getPlugin("PermissionsEx");
if (plugin == null || !(plugin instanceof PermissionsPlugin)) {
throw new RuntimeException("Permissions manager are not accessable. PermissionsEx plugin disabled?");
}
return ((PermissionsPlugin) plugin).permissionsManager;
}
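    /*
     * Usage sketch for code outside this plugin ("somePlayer" is a hypothetical Player reference):
     *
     *   PermissionManager pm = PermissionsPlugin.getPermissionManager();
     *   if (pm.has(somePlayer, "modifyworld.blocks.place")) {
     *       // allow the action
     *   }
     */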
protected Configuration loadConfig(String name) {
File configurationFile = new File(getDataFolder(), configFile);
Configuration config;
if (!configurationFile.exists()) {
try {
if (!getDataFolder().exists()) {
getDataFolder().mkdirs();
}
configurationFile.createNewFile(); // Try to create new one
config = new Configuration(configurationFile);
config.setProperty("permissions.basedir", getDataFolder().getPath());
config.save();
} catch (IOException e) {
// And if failed (ex.: not enough rights) - catch exception
throw new RuntimeException(e); // Rethrow exception
}
} else {
config = new Configuration(configurationFile);
config.load();
}
return config;
}
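    /*
     * On first run the generated config.yml contains little more than the base directory set
     * above; the exact path depends on the server layout, for example:
     *
     *   permissions:
     *       basedir: plugins/PermissionsEx
     */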
protected void registerEvents() {
BlockListener blockProtector = new BlockProtector();
PlayerListener playerProtector = new PlayerListener();
EntityListener entityProtector = new EntityListener();
VehicleListener vehicleProtector = new VehicleListener();
PluginManager pluginManager = this.getServer().getPluginManager();
//Block events
pluginManager.registerEvent(Event.Type.BLOCK_PLACE, blockProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.BLOCK_BREAK, blockProtector, Priority.Low, this);
//Player events
pluginManager.registerEvent(Event.Type.PLAYER_QUIT, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_BED_ENTER, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_BUCKET_EMPTY, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_BUCKET_FILL, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_CHAT, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_DROP_ITEM, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_INTERACT, playerProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.PLAYER_PICKUP_ITEM, playerProtector, Priority.Low, this);
//Entity events
pluginManager.registerEvent(Event.Type.ENTITY_TARGET, entityProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.ENTITY_INTERACT, entityProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.ENTITY_DAMAGE, entityProtector, Priority.Low, this);
//Vehicle events
pluginManager.registerEvent(Event.Type.VEHICLE_COLLISION_ENTITY, vehicleProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.VEHICLE_ENTER, vehicleProtector, Priority.Low, this);
pluginManager.registerEvent(Event.Type.VEHICLE_DAMAGE, vehicleProtector, Priority.Low, this);
}
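    /*
     * Further events can be guarded in exactly the same way from inside the method above,
     * for example (event type name assumed from the Bukkit API of this era):
     *
     *   pluginManager.registerEvent(Event.Type.SIGN_CHANGE, blockProtector, Priority.Low, this);
     */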
private class VehicleListener extends org.bukkit.event.vehicle.VehicleListener {
@Override
public void onVehicleDamage(VehicleDamageEvent event) {
if (!(event.getAttacker() instanceof Player)) {
return;
}
Player player = (Player) event.getAttacker();
if (!permissionsManager.has(player, "modifyworld.vehicle.destroy")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onVehicleEnter(VehicleEnterEvent event) {
if (!(event.getEntered() instanceof Player)) {
return;
}
Player player = (Player) event.getEntered();
if (!permissionsManager.has(player, "modifyworld.vehicle.enter")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onVehicleEntityCollision(VehicleEntityCollisionEvent event) {
if (!(event.getEntity() instanceof Player)) {
return;
}
Player player = (Player) event.getEntity();
if (!permissionsManager.has(player, "modifyworld.vehicle.collide")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
event.setCollisionCancelled(true);
event.setPickupCancelled(true);
}
}
}
private class EntityListener extends org.bukkit.event.entity.EntityListener {
@Override
public void onEntityDamage(EntityDamageEvent event) {
if (event instanceof EntityDamageByEntityEvent) { // player is damager
EntityDamageByEntityEvent edbe = (EntityDamageByEntityEvent) event;
if (!(edbe.getDamager() instanceof Player)) { // not caused by player
return;
}
Player player = (Player) edbe.getDamager();
if (!permissionsManager.has(player, "modifyworld.entity.damage.deal")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
} else if (event.getEntity() instanceof Player) { // player are been damaged by someone
Player player = (Player) event.getEntity();
if (!permissionsManager.has(player, "modifyworld.entity.damage.take")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
}
@Override
public void onEntityTarget(EntityTargetEvent event) {
if (event.getEntity() instanceof Player) {
Player player = (Player) event.getEntity();
if (!permissionsManager.has(player, "modifyworld.entity.mobtarget")) {
player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
}
}
private class PlayerListener extends org.bukkit.event.player.PlayerListener {
@Override
public void onPlayerQuit(PlayerQuitEvent event) {
super.onPlayerQuit(event);
getPermissionManager().resetUser(event.getPlayer().getName());
}
@Override
public void onPlayerBedEnter(PlayerBedEnterEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.usebeds")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerBucketEmpty(PlayerBucketEmptyEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.bucket.empty")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerBucketFill(PlayerBucketFillEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.bucket.fill")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerChat(PlayerChatEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.chat")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerDropItem(PlayerDropItemEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.items.drop")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerPickupItem(PlayerPickupItemEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.items.pickup." + event.getItem().getEntityId())) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onPlayerInteract(PlayerInteractEvent event) {
if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.interact")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
}
private class BlockProtector extends BlockListener {
@Override
public void onBlockBreak(BlockBreakEvent event) {
super.onBlockBreak(event);
if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.destroy")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
@Override
public void onBlockPlace(BlockPlaceEvent event) {
super.onBlockPlace(event);
if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.place")) {
event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
event.setCancelled(true);
}
}
}
}
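/*
 * Permission nodes checked above, all beneath the "modifyworld." prefix unless noted:
 * vehicle.destroy, vehicle.enter, vehicle.collide, entity.damage.deal, entity.damage.take,
 * entity.mobtarget, usebeds, bucket.empty, bucket.fill, chat, items.drop,
 * items.pickup.<entityId>, blocks.interact, blocks.destroy, blocks.place,
 * plus the standalone "permissions.manage" node used by the command handler.
 */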
| src/main/java/ru/tehkode/permissions/bukkit/PermissionsPlugin.java | /*
* PermissionsEx - Permissions plugin for Bukkit
* Copyright (C) 2011 t3hk0d3 http://www.tehkode.ru
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ru.tehkode.permissions.bukkit;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.Event.Priority;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockListener;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.java.JavaPlugin;
import ru.tehkode.permissions.PermissionBackend;
import ru.tehkode.permissions.PermissionManager;
import ru.tehkode.permissions.PermissionUser;
import ru.tehkode.permissions.backends.FileBackend;
import ru.tehkode.permissions.backends.SQLBackend;
import ru.tehkode.permissions.commands.CommandsManager;
import ru.tehkode.permissions.config.Configuration;
/**
*
* @author code
*/
public class PermissionsPlugin extends JavaPlugin {
protected static final String configFile = "config.yml";
protected static final Logger logger = Logger.getLogger("Minecraft");
protected PermissionManager permissionsManager;
protected CommandsManager commandsManager;
protected BlockListener blockProtector = new BlockProtector();
public PermissionsPlugin() {
super();
PermissionBackend.registerBackendAlias("sql", SQLBackend.class);
PermissionBackend.registerBackendAlias("file", FileBackend.class);
logger.log(Level.INFO, "[PermissionsEx] PermissionEx plugin was Initialized.");
}
@Override
public void onLoad() {
this.commandsManager = new CommandsManager(this);
this.permissionsManager = new PermissionManager(this.loadConfig(configFile));
}
@Override
public void onEnable() {
this.commandsManager.register(new ru.tehkode.permissions.bukkit.commands.PermissionsCommand());
this.getServer().getPluginManager().registerEvent(Event.Type.BLOCK_PLACE, this.blockProtector, Priority.Low, this);
this.getServer().getPluginManager().registerEvent(Event.Type.BLOCK_BREAK, this.blockProtector, Priority.Low, this);
this.getServer().getPluginManager().registerEvent(Event.Type.PLAYER_QUIT, new org.bukkit.event.player.PlayerListener(), Priority.Low, this);
logger.log(Level.INFO, "[PermissionsEx] version [" + this.getDescription().getVersion() + "] (" + this.getDescription().getVersion() + ") loaded");
}
@Override
public void onDisable() {
logger.log(Level.INFO, "[PermissionsEx-" + this.getDescription().getVersion() + "] disabled successfully.");
}
@Override
public boolean onCommand(CommandSender sender, Command command, String commandLabel, String[] args) {
PluginDescriptionFile pdfFile = this.getDescription();
if (args.length > 0) {
return this.commandsManager.execute(sender, command, args);
} else {
if (sender instanceof Player) {
sender.sendMessage(ChatColor.WHITE + "[PermissionsEx]: Running (" + pdfFile.getVersion() + ")");
return !this.permissionsManager.has((Player) sender, "permissions.manage");
} else {
sender.sendMessage("[" + pdfFile.getName() + "] version [" + pdfFile.getVersion() + "] loaded");
return false;
}
}
}
public static PermissionManager getPermissionManager() {
Plugin plugin = Bukkit.getServer().getPluginManager().getPlugin("PermissionsEx");
if (plugin == null || !(plugin instanceof PermissionsPlugin)) {
throw new RuntimeException("Permissions manager are not accessable. PermissionsEx plugin disabled?");
}
return ((PermissionsPlugin) plugin).permissionsManager;
}
protected Configuration loadConfig(String name) {
File configurationFile = new File(getDataFolder(), configFile);
Configuration config;
if (!configurationFile.exists()) {
try {
if (!getDataFolder().exists()) {
getDataFolder().mkdirs();
}
configurationFile.createNewFile(); // Try to create new one
config = new Configuration(configurationFile);
config.setProperty("permissions.basedir", getDataFolder().getPath());
config.save();
} catch (IOException e) {
// And if failed (ex.: not enough rights) - catch exception
throw new RuntimeException(e); // Rethrow exception
}
} else {
config = new Configuration(configurationFile);
config.load();
}
return config;
}
private class PlayerListener extends org.bukkit.event.player.PlayerListener {
@Override
public void onPlayerQuit(PlayerQuitEvent event) {
super.onPlayerQuit(event);
getPermissionManager().resetUser(event.getPlayer().getName());
}
}
private class BlockProtector extends BlockListener {
@Override
public void onBlockBreak(BlockBreakEvent event) {
super.onBlockBreak(event);
Player player = event.getPlayer();
if (!permissionsManager.has(player, "modifyworld.destroy")) {
event.setCancelled(true);
}
}
@Override
public void onBlockPlace(BlockPlaceEvent event) {
super.onBlockPlace(event);
Player player = event.getPlayer();
if (!permissionsManager.has(player, "modifyworld.place")) {
event.setCancelled(true);
}
}
}
}
| Antigrief protecting embedding
| src/main/java/ru/tehkode/permissions/bukkit/PermissionsPlugin.java | Antigrief protecting embedding | <ide><path>rc/main/java/ru/tehkode/permissions/bukkit/PermissionsPlugin.java
<ide>
<ide> import java.io.File;
<ide> import java.io.IOException;
<del>import java.util.Arrays;
<ide> import java.util.logging.Level;
<ide> import java.util.logging.Logger;
<ide> import org.bukkit.Bukkit;
<ide> import org.bukkit.command.Command;
<ide> import org.bukkit.command.CommandSender;
<ide> import org.bukkit.entity.Player;
<del>import org.bukkit.event.Event;
<add>import org.bukkit.event.*;
<ide> import org.bukkit.event.Event.Priority;
<del>import org.bukkit.event.block.BlockBreakEvent;
<del>import org.bukkit.event.block.BlockListener;
<del>import org.bukkit.event.block.BlockPlaceEvent;
<del>import org.bukkit.event.player.PlayerJoinEvent;
<del>import org.bukkit.event.player.PlayerQuitEvent;
<del>import org.bukkit.plugin.Plugin;
<del>import org.bukkit.plugin.PluginDescriptionFile;
<add>import org.bukkit.event.block.*;
<add>import org.bukkit.event.entity.*;
<add>import org.bukkit.event.player.*;
<add>import org.bukkit.event.vehicle.*;
<add>import org.bukkit.plugin.*;
<ide> import org.bukkit.plugin.java.JavaPlugin;
<del>import ru.tehkode.permissions.PermissionBackend;
<del>import ru.tehkode.permissions.PermissionManager;
<del>import ru.tehkode.permissions.PermissionUser;
<del>import ru.tehkode.permissions.backends.FileBackend;
<del>import ru.tehkode.permissions.backends.SQLBackend;
<add>import ru.tehkode.permissions.*;
<add>import ru.tehkode.permissions.backends.*;
<ide> import ru.tehkode.permissions.commands.CommandsManager;
<ide> import ru.tehkode.permissions.config.Configuration;
<ide>
<ide> protected static final Logger logger = Logger.getLogger("Minecraft");
<ide> protected PermissionManager permissionsManager;
<ide> protected CommandsManager commandsManager;
<del> protected BlockListener blockProtector = new BlockProtector();
<ide>
<ide> public PermissionsPlugin() {
<ide> super();
<ide> public void onEnable() {
<ide> this.commandsManager.register(new ru.tehkode.permissions.bukkit.commands.PermissionsCommand());
<ide>
<del> this.getServer().getPluginManager().registerEvent(Event.Type.BLOCK_PLACE, this.blockProtector, Priority.Low, this);
<del> this.getServer().getPluginManager().registerEvent(Event.Type.BLOCK_BREAK, this.blockProtector, Priority.Low, this);
<del>
<del> this.getServer().getPluginManager().registerEvent(Event.Type.PLAYER_QUIT, new org.bukkit.event.player.PlayerListener(), Priority.Low, this);
<add> this.registerEvents();
<ide>
<ide> logger.log(Level.INFO, "[PermissionsEx] version [" + this.getDescription().getVersion() + "] (" + this.getDescription().getVersion() + ") loaded");
<ide> }
<ide> return config;
<ide> }
<ide>
<add> protected void registerEvents() {
<add> BlockListener blockProtector = new BlockProtector();
<add> PlayerListener playerProtector = new PlayerListener();
<add> EntityListener entityProtector = new EntityListener();
<add> VehicleListener vehicleProtector = new VehicleListener();
<add>
<add> PluginManager pluginManager = this.getServer().getPluginManager();
<add>
<add>
<add> //Block events
<add> pluginManager.registerEvent(Event.Type.BLOCK_PLACE, blockProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.BLOCK_BREAK, blockProtector, Priority.Low, this);
<add>
<add> //Player events
<add> pluginManager.registerEvent(Event.Type.PLAYER_QUIT, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_BED_ENTER, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_BUCKET_EMPTY, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_BUCKET_FILL, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_CHAT, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_DROP_ITEM, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_INTERACT, playerProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.PLAYER_PICKUP_ITEM, playerProtector, Priority.Low, this);
<add>
<add> //Entity events
<add> pluginManager.registerEvent(Event.Type.ENTITY_TARGET, entityProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.ENTITY_INTERACT, entityProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.ENTITY_DAMAGE, entityProtector, Priority.Low, this);
<add>
<add> //Vehicle events
<add> pluginManager.registerEvent(Event.Type.VEHICLE_COLLISION_ENTITY, vehicleProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.VEHICLE_ENTER, vehicleProtector, Priority.Low, this);
<add> pluginManager.registerEvent(Event.Type.VEHICLE_DAMAGE, vehicleProtector, Priority.Low, this);
<add>
<add> }
<add>
<add> private class VehicleListener extends org.bukkit.event.vehicle.VehicleListener {
<add>
<add> @Override
<add> public void onVehicleDamage(VehicleDamageEvent event) {
<add> if (!(event.getAttacker() instanceof Player)) {
<add> return;
<add> }
<add>
<add> Player player = (Player) event.getAttacker();
<add> if (!permissionsManager.has(player, "modifyworld.vehicle.destroy")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onVehicleEnter(VehicleEnterEvent event) {
<add> if (!(event.getEntered() instanceof Player)) {
<add> return;
<add> }
<add>
<add> Player player = (Player) event.getEntered();
<add> if (!permissionsManager.has(player, "modifyworld.vehicle.enter")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onVehicleEntityCollision(VehicleEntityCollisionEvent event) {
<add> if (!(event.getEntity() instanceof Player)) {
<add> return;
<add> }
<add>
<add> Player player = (Player) event.getEntity();
<add> if (!permissionsManager.has(player, "modifyworld.vehicle.collide")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> event.setCollisionCancelled(true);
<add> event.setPickupCancelled(true);
<add> }
<add> }
<add> }
<add>
<add> private class EntityListener extends org.bukkit.event.entity.EntityListener {
<add>
<add> @Override
<add> public void onEntityDamage(EntityDamageEvent event) {
<add> if (event instanceof EntityDamageByEntityEvent) { // player is damager
<add> EntityDamageByEntityEvent edbe = (EntityDamageByEntityEvent) event;
<add> if (!(edbe.getDamager() instanceof Player)) { // not caused by player
<add> return;
<add> }
<add>
<add> Player player = (Player) edbe.getDamager();
<add> if (!permissionsManager.has(player, "modifyworld.entity.damage.deal")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> } else if (event.getEntity() instanceof Player) { // player are been damaged by someone
<add> Player player = (Player) event.getEntity();
<add> if (!permissionsManager.has(player, "modifyworld.entity.damage.take")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add> }
<add>
<add> @Override
<add> public void onEntityTarget(EntityTargetEvent event) {
<add> if (event.getEntity() instanceof Player) {
<add> Player player = (Player) event.getEntity();
<add> if (!permissionsManager.has(player, "modifyworld.entity.mobtarget")) {
<add> player.sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add> }
<add> }
<add>
<ide> private class PlayerListener extends org.bukkit.event.player.PlayerListener {
<ide>
<ide> @Override
<ide> super.onPlayerQuit(event);
<ide> getPermissionManager().resetUser(event.getPlayer().getName());
<ide> }
<add>
<add> @Override
<add> public void onPlayerBedEnter(PlayerBedEnterEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.usebeds")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerBucketEmpty(PlayerBucketEmptyEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.bucket.empty")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerBucketFill(PlayerBucketFillEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.bucket.fill")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerChat(PlayerChatEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.chat")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerDropItem(PlayerDropItemEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.items.drop")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerPickupItem(PlayerPickupItemEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.items.pickup." + event.getItem().getEntityId())) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<add>
<add> @Override
<add> public void onPlayerInteract(PlayerInteractEvent event) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.interact")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<add> event.setCancelled(true);
<add> }
<add> }
<ide> }
<ide>
<ide> private class BlockProtector extends BlockListener {
<ide> @Override
<ide> public void onBlockBreak(BlockBreakEvent event) {
<ide> super.onBlockBreak(event);
<del> Player player = event.getPlayer();
<del> if (!permissionsManager.has(player, "modifyworld.destroy")) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.destroy")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<ide> event.setCancelled(true);
<ide> }
<ide> }
<ide> @Override
<ide> public void onBlockPlace(BlockPlaceEvent event) {
<ide> super.onBlockPlace(event);
<del> Player player = event.getPlayer();
<del> if (!permissionsManager.has(player, "modifyworld.place")) {
<add> if (!permissionsManager.has(event.getPlayer(), "modifyworld.blocks.place")) {
<add> event.getPlayer().sendMessage(ChatColor.RED + "Sorry, you don't have enought permissions");
<ide> event.setCancelled(true);
<ide> }
<ide> } |
|
JavaScript | mit | 4dd421f4bef87d4023dfec813ccdcafd0bdb5742 | 0 | CoderKevinZhang/web-front-end-practice,CoderKevinZhang/web-front-end-practice,CoderKevinZhang/web-front-end-practice,CoderKevinZhang/web-front-end-practice | require('normalize.css/normalize.css');
require('styles/App.scss');
import React from 'react';
import ReactDOM from 'react-dom'; // It's important to add this line in order to use ReactDOM.findDOMNode
// let yeomanImage = require('../images/yeoman.png');
// Get the images data from imagesData.json file
let imagesData = require('../data/imagesData.json');
/* Use self-running function to let imagesData collect every image url*/
let imagesLinks = ((imagesDataArray) => {
for (let i = 0, j = imagesDataArray.length; i < j; i++) {
let singleImageData = imagesDataArray[i];
singleImageData.imageURL = require('../images/' + singleImageData.fileName);
imagesDataArray[i] = singleImageData;
}
return imagesDataArray;
})(imagesData);
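/*
 * Each entry of imagesData.json is expected to look roughly like this (illustrative values):
 *
 *   { "fileName": "1.jpg", "title": "Sample", "description": "A short caption" }
 *
 * The self-running function above then attaches an imageURL field resolved through the bundler.
 */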
class ImgFigure extends React.Component {
constructor(props) {
super(props);
this.handleClick = this.handleClick.bind(this);
}
/*
* Reverse the image with 180 degree
* @param e: Event Object
* @return
*/
handleClick(e) {
if (this.props.arrange.isCenter) {
this.props.reverse();
}
else {
this.props.goCenter();
}
e.stopPropagation();
e.preventDefault();
}
render() {
let styleObject = {};
if (this.props.arrange.pos) {
styleObject = this.props.arrange.pos;
}
if (this.props.arrange.isCenter) {
styleObject['zIndex'] = 11;
styleObject['boxShadow'] = '0 0 20px 0 #888888';
}
if (this.props.arrange.rotate) {
let degree = this.props.arrange.rotate;
(['WebkitT', 'MozT', 'msT', 'OT', 't']).forEach((value) => {
styleObject[value + 'ransform'] = 'rotate(' + degree + 'deg)';
})
}
let figureClassName = 'figure-layout';
figureClassName += this.props.arrange.isReverse ? ' flipped' : '';
return (
<figure className={figureClassName} style={styleObject} onClick={this.handleClick}>
<div className="imgFront">
<img src={this.props.data.imageURL} alt={this.props.data.title}/>
<figcaption>
<h2 className="img-title">{this.props.data.title}</h2>
</figcaption>
</div>
<div className="img-back" onClick={this.handleClick}>
<p>
{this.props.data.description}
</p>
</div>
</figure>
);
}
}
class AppComponent extends React.Component {
constructor(props) {
super(props);
this.Contans = {
centerPos: {// center point value
left: 0,
top: 0
},
hPosRange: {// values of right and left parts
leftSecX: [0, 0],
rightSecX: [0, 0],
y: [0, 0]
},
vPosRange: {// values of top part
x: [0, 0],
topSecY: [0, 0]
}
};
this.state = {
imgsArrangeArr: []
}
}
    /* Get a random integer in the range [min, max), i.e. >= min and < max
*
* @param min: minimum limit, max: maximum limit
*/
getRandomRange(min, max) {
return Math.floor(Math.random() * (max - min) + min);
}
/* Get random rotation degree from -deg to deg
*
* @param deg: degree of rotation
*/
getRandomDegree(deg) {
return (Math.random() > 0.5 ? '' : '-') + Math.floor(Math.random() * deg);
}
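    /*
     * Rough behaviour of the two helpers above (outputs are illustrative):
     *   getRandomRange(10, 20)  -> an integer in [10, 20)
     *   getRandomDegree(30)     -> a string such as '17' or '-23', i.e. a magnitude below 30
     */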
/*
* Flip the image
*
* @param index: the index of the image
* @return {Function} is a closure function
*/
flipImg(index) {
return () => {
let imgsArrangeArr = this.state.imgsArrangeArr;
imgsArrangeArr[index].isReverse = !imgsArrangeArr[index].isReverse;
// update the state of the component
this.setState({
imgsArrangeArr: imgsArrangeArr
});
}
}
moveCenter(index) {
return () => {
this.reArrange(index)
}
}
    /* Rearrange all images, moving the image at centerIndex to the center of the stage
     *
     * @param centerIndex: the index of the image to move to the center
     */
reArrange(centerIndex) {
let imgsArrangeArr = this.state.imgsArrangeArr,
Constans = this.Contans,
centerPos = Constans.centerPos,
hPosRange = Constans.hPosRange,
vPosRange = Constans.vPosRange,
hPosRangeLeftScaleX = hPosRange.leftSecX,
hPosRangeRightScaleX = hPosRange.rightSecX,
hPosRangeY = hPosRange.y,
vPosRangeX = vPosRange.x,
vPosRangeTopY = vPosRange.topSecY;
// set the position of center image & rotation degree is not required
let imgArrayCenter = imgsArrangeArr.splice(centerIndex, 1);
imgArrayCenter[0].pos = centerPos;
imgArrayCenter[0].rotate = 0;
imgArrayCenter[0].isCenter = true;
// set the position of top image & rotation degree is required
let topImgNum = Math.floor(Math.random() * 2), // either one or no img on the top area
topImgIndex = Math.floor(Math.random() * (imgsArrangeArr.length - topImgNum)),
topImgArray = imgsArrangeArr.splice(topImgIndex, topImgNum);
topImgArray.forEach((value, index) => {
topImgArray[index].pos = {
left: this.getRandomRange(vPosRangeX[0], vPosRangeX[1]),
top: this.getRandomRange(vPosRangeTopY[0], vPosRangeTopY[1])
};
topImgArray[index].rotate = this.getRandomDegree(30);
topImgArray[index].isCenter = false;
});
// set the position of left & right images & rotation degree is required
for (let i = 0, j = imgsArrangeArr.length, k = j / 2; i < j; i++) {
let areaLeftorRight = [];
if (i < k) {
areaLeftorRight = hPosRangeLeftScaleX;
} else {
areaLeftorRight = hPosRangeRightScaleX;
}
imgsArrangeArr[i].pos = {
left: this.getRandomRange(areaLeftorRight[0], areaLeftorRight[1]),
top: this.getRandomRange(hPosRangeY[0], hPosRangeY[1])
};
imgsArrangeArr[i].rotate = this.getRandomDegree(30);
imgsArrangeArr[i].isCenter = false;
}
// put topImgArray and imgArrayCenter into imgsArrangeArr
if (topImgArray && topImgArray[0]) {
imgsArrangeArr.splice(topImgIndex, 0, topImgArray[0]);
}
imgsArrangeArr.splice(centerIndex, 0, imgArrayCenter[0]);
// update the state of the component
this.setState({
imgsArrangeArr: imgsArrangeArr
});
}
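    /*
     * After reArrange runs, every entry of this.state.imgsArrangeArr has this shape
     * (numbers are illustrative):
     *
     *   {
     *     pos: { left: 320, top: 180 }, // pixel offsets inside the stage
     *     rotate: -15,                  // degrees; 0 for the centered image
     *     isReverse: false,             // true while the figure shows its back side
     *     isCenter: true                // true only for the single centered image
     *   }
     */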
    // Initialize Contans on first mount: compute the position ranges used to lay out each image
componentDidMount() {
        // Get the size of the stage
let stageDOM = ReactDOM.findDOMNode(this.refs.stage),
stageW = stageDOM.clientWidth,
stageH = stageDOM.clientHeight,
halfStageW = Math.floor(stageW / 2),
halfStageH = Math.floor(stageH / 2);
        // Get the size of an imgFigure
let imgFigDOM = ReactDOM.findDOMNode(this.refs.image_0),
imgFigW = imgFigDOM.clientWidth,
imgFigH = imgFigDOM.clientHeight,
halfImgW = Math.floor(imgFigW / 2),
halfImgH = Math.floor(imgFigH / 2);
        // Compute the position of the center image
this.Contans.centerPos.left = halfStageW - halfImgW;
this.Contans.centerPos.top = halfStageH - halfImgH;
        // Compute the position ranges of the left and right areas
this.Contans.hPosRange.leftSecX[0] = -halfImgW;
this.Contans.hPosRange.leftSecX[1] = halfStageW - halfImgW * 3;
this.Contans.hPosRange.rightSecX[0] = halfStageW + halfImgW;
this.Contans.hPosRange.rightSecX[1] = stageW - halfImgW;
this.Contans.hPosRange.y[0] = -halfImgH;
this.Contans.hPosRange.y[1] = stageH - halfImgH;
        // Compute the position range of the top area
this.Contans.vPosRange.x[0] = halfStageW - halfImgW;
this.Contans.vPosRange.x[1] = halfStageW;
this.Contans.vPosRange.topSecY[0] = -halfImgH;
this.Contans.vPosRange.topSecY[1] = halfStageH - halfImgH * 3;
this.reArrange(0);
}
render() {
let imgFigures = [],
controlUnits = [];
imagesLinks.forEach((value, index) => {
if (!this.state.imgsArrangeArr[index]) { // initiate the state of every image
this.state.imgsArrangeArr[index] = {
pos: { // initial position of images
left: 0,
top: 0
},
                    rotate: 0, // initialize the rotation degree of the image
                    isReverse: false, // initialize the flipped (reversed) status of the image
                    isCenter: false // initialize whether the image is the centered one
}
}
imgFigures.push(<ImgFigure data={value}
key={index}
ref={'image_' + index}
arrange={this.state.imgsArrangeArr[index]}
reverse={this.flipImg(index)}
goCenter={this.moveCenter(index)}/>)
});
return (
<section className="stage" ref="stage">
<section className="image-sec">
{imgFigures}
</section>
<nav className="controller-nav">
{controlUnits}
</nav>
</section>
);
}
}
AppComponent.defaultProps = {};
ReactDOM.render(
<AppComponent />,
document.getElementById('app')
);
export default AppComponent;
| reactJS/my-gallery-project/src/components/Main.js | require('normalize.css/normalize.css');
require('styles/App.scss');
import React from 'react';
import ReactDOM from 'react-dom'; // It's important to add this line in order to use ReactDOM.findDOMNode
// let yeomanImage = require('../images/yeoman.png');
// Get the images data from imagesData.json file
let imagesData = require('../data/imagesData.json');
/* Use self-running function to let imagesData collect every image url*/
let imagesLinks = ((imagesDataArray) => {
for (let i = 0, j = imagesDataArray.length; i < j; i++) {
let singleImageData = imagesDataArray[i];
singleImageData.imageURL = require('../images/' + singleImageData.fileName);
imagesDataArray[i] = singleImageData;
}
return imagesDataArray;
})(imagesData);
class ImgFigure extends React.Component {
constructor(props) {
super(props);
this.handleClick = this.handleClick.bind(this);
}
/*
* Reverse the image with 180 degree
* @param e: Event Object
* @return
*/
handleClick(e) {
if (this.props.arrange.isCenter) {
this.props.reverse();
}
else {
this.props.goCenter();
}
e.stopPropagation();
e.preventDefault();
}
render() {
let styleObject = {};
if (this.props.arrange.pos) {
styleObject = this.props.arrange.pos;
}
if (this.props.arrange.isCenter) {
styleObject['zIndex'] = 11;
styleObject['boxShadow'] = '0 0 20px 0 #888888';
}
if (this.props.arrange.rotate) {
let degree = this.props.arrange.rotate;
(['WebkitT', 'MozT', 'msT', 'OT', 't']).forEach((value) => {
styleObject[value + 'ransform'] = 'rotate(' + degree + 'deg)';
})
}
let figureClassName = 'figure-layout';
figureClassName += this.props.arrange.isReverse ? ' flipped' : '';
return (
<figure className={figureClassName} style={styleObject} onClick={this.handleClick}>
<div className="imgFront">
<img src={this.props.data.imageURL} alt={this.props.data.title}/>
<figcaption>
<h2 className="img-title">{this.props.data.title}</h2>
</figcaption>
</div>
<div className="img-back" onClick={this.handleClick}>
<p>
{this.props.data.description}
</p>
</div>
</figure>
);
}
}
class AppComponent extends React.Component {
constructor(props) {
super(props);
this.Contans = {
centerPos: {// center point value
left: 0,
top: 0
},
hPosRange: {// values of right and left parts
leftSecX: [0, 0],
rightSecX: [0, 0],
y: [0, 0]
},
vPosRange: {// values of top part
x: [0, 0],
topSecY: [0, 0]
}
};
this.state = {
imgsArrangeArr: []
}
}
/* Get random range from >=min & <(max - min)
*
* @param min: minimum limit, max: maximum limit
*/
getRandomRange(min, max) {
return Math.floor(Math.random() * (max - min) + min);
}
/* Get random rotation degree from -deg to deg
*
* @param deg: degree of rotation
*/
getRandomDegree(deg) {
return (Math.random() > 0.5 ? '' : '-') + Math.floor(Math.random() * deg);
}
/*
* Flip the image
*
* @param index: the index of the image
* @return {Function} is a closure function
*/
flipImg(index) {
return () => {
let imgsArrangeArr = this.state.imgsArrangeArr;
imgsArrangeArr[index].isReverse = !imgsArrangeArr[index].isReverse;
// update the state of the component
this.setState({
imgsArrangeArr: imgsArrangeArr
});
}
}
moveCenter(index) {
return () => {
this.reArrange(index)
}
}
/* Add images to the center
*
* @param centerIndex: the index of the image
*/
reArrange(centerIndex) {
let imgsArrangeArr = this.state.imgsArrangeArr,
Constans = this.Contans,
centerPos = Constans.centerPos,
hPosRange = Constans.hPosRange,
vPosRange = Constans.vPosRange,
hPosRangeLeftScaleX = hPosRange.leftSecX,
hPosRangeRightScaleX = hPosRange.rightSecX,
hPosRangeY = hPosRange.y,
vPosRangeX = vPosRange.x,
vPosRangeTopY = vPosRange.topSecY;
// set the position of center image & rotation degree is not required
let imgArrayCenter = imgsArrangeArr.splice(centerIndex, 1);
imgArrayCenter[0].pos = centerPos;
imgArrayCenter[0].rotate = 0;
imgArrayCenter[0].isCenter = true;
// set the position of top image & rotation degree is required
let topImgNum = Math.floor(Math.random() * 2), // either one or no img on the top area
topImgIndex = Math.floor(Math.random() * (imgsArrangeArr.length - topImgNum)),
topImgArray = imgsArrangeArr.splice(topImgIndex, topImgNum);
topImgArray.forEach((value, index) => {
topImgArray[index].pos = {
left: this.getRandomRange(vPosRangeX[0], vPosRangeX[1]),
top: this.getRandomRange(vPosRangeTopY[0], vPosRangeTopY[1])
};
topImgArray[index].rotate = this.getRandomDegree(30);
topImgArray[index].isCenter = false;
});
// Set the positions of the left and right images; a random rotation is applied
for (let i = 0, j = imgsArrangeArr.length, k = j / 2; i < j; i++) {
let areaLeftorRight = [];
if (i < k) {
areaLeftorRight = hPosRangeLeftScaleX;
} else {
areaLeftorRight = hPosRangeRightScaleX;
}
imgsArrangeArr[i].pos = {
left: this.getRandomRange(areaLeftorRight[0], areaLeftorRight[1]),
top: this.getRandomRange(hPosRangeY[0], hPosRangeY[1])
};
imgsArrangeArr[i].rotate = this.getRandomDegree(30);
imgsArrangeArr[i].isCenter = false;
}
// put topImgArray and imgArrayCenter into imgsArrangeArr
if (topImgArray && topImgArray[0]) {
imgsArrangeArr.splice(topImgIndex, 0, topImgArray[0]);
}
imgsArrangeArr.splice(centerIndex, 0, imgArrayCenter[0]);
// update the state of the component
this.setState({
imgsArrangeArr: imgsArrangeArr
});
}
// Initialize Contans when the component first mounts: compute the position range available to each image
componentDidMount() {
// Get the size of the stage
let stageDOM = ReactDOM.findDOMNode(this.refs.stage),
stageW = stageDOM.clientWidth,
stageH = stageDOM.clientHeight,
halfStageW = Math.floor(stageW / 2),
halfStageH = Math.floor(stageH / 2);
// Get the size of an ImgFigure
let imgFigDOM = ReactDOM.findDOMNode(this.refs.image_0),
imgFigW = imgFigDOM.clientWidth,
imgFigH = imgFigDOM.clientHeight,
halfImgW = Math.floor(imgFigW / 2),
halfImgH = Math.floor(imgFigH / 2);
// Compute the position of the center image
this.Contans.centerPos.left = halfStageW - halfImgW;
this.Contans.centerPos.top = halfStageH - halfImgH;
// Compute the ranges of the left and right areas
this.Contans.hPosRange.leftSecX[0] = -halfImgW;
this.Contans.hPosRange.leftSecX[1] = halfStageW - halfImgW * 3;
this.Contans.hPosRange.rightSecX[0] = halfStageW + halfImgW;
this.Contans.hPosRange.rightSecX[1] = stageW - halfImgW;
this.Contans.hPosRange.y[0] = -halfImgH;
this.Contans.hPosRange.y[1] = stageH - halfImgH;
// Compute the range of the top area
this.Contans.vPosRange.x[0] = halfStageW - halfImgW;
this.Contans.vPosRange.x[1] = halfStageW;
this.Contans.vPosRange.topSecY[0] = -halfImgH;
this.Contans.vPosRange.topSecY[1] = halfStageH - halfImgH * 3;
this.reArrange(0);
}
render() {
let imgFigures = [],
controlUnits = [];
imagesLinks.forEach((value, index) => {
if (!this.state.imgsArrangeArr[index]) { // initiate the state of every image
this.state.imgsArrangeArr[index] = {
pos: { // initial position of images
left: 0,
top: 0
},
rotate: 0, // initial the rotation degree of images
isReverse: false, // initiate the reverse status of images
isCenter: false // initiate the status of center position of images
}
}
imgFigures.push(<ImgFigure data={value}
key={index}
ref={'image_' + index}
arrange={this.state.imgsArrangeArr[index]}
reverse={this.flipImg(index)}
goCenter={this.moveCenter(index)}/>)
});
return (
<section className="stage" ref="stage">
<section className="image-sec">
{imgFigures}
</section>
<nav className="controller-nav">
{controlUnits}
</nav>
</section>
);
}
}
AppComponent.defaultProps = {};
export default AppComponent;
| add some codes
| reactJS/my-gallery-project/src/components/Main.js | add some codes | <ide><path>eactJS/my-gallery-project/src/components/Main.js
<ide>
<ide> AppComponent.defaultProps = {};
<ide>
<add>ReactDOM.render(
<add> <AppComponent />,
<add> document.getElementById('app')
<add>);
<add>
<ide> export default AppComponent; |
|
Java | apache-2.0 | ab17a7b71cff460021ee6c2df4de797e3f771626 | 0 | smart-fun/smartGL | /*
Copyright 2016 Arnaud Guyon
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fr.arnaudguyon.smartgl.tools;
import android.content.Context;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import fr.arnaudguyon.smartgl.opengl.ColorList;
import fr.arnaudguyon.smartgl.opengl.Face3D;
import fr.arnaudguyon.smartgl.opengl.NormalList;
import fr.arnaudguyon.smartgl.opengl.Object3D;
import fr.arnaudguyon.smartgl.opengl.Texture;
import fr.arnaudguyon.smartgl.opengl.UVList;
import fr.arnaudguyon.smartgl.opengl.VertexList;
/**
* Created by aguyon on 21.11.16.
* Helper to load Wavefront objects and convert them to Object3D
*/
public class WavefrontModel {
private static final String TAG = "WavefrontModel";
public static class Builder {
Context mContext;
int mRawResourceId;
boolean mOptimizeModel = true;
HashMap<String, Texture> mTextures = new HashMap<>();
public Builder(Context context, int rawFileResourceId) {
mContext = context;
mRawResourceId = rawFileResourceId;
}
public Builder optimize(boolean optimizeModel) {
mOptimizeModel = optimizeModel;
return this;
}
public Builder addTexture(String textureName, Texture texture) {
mTextures.put(textureName, texture);
return this;
}
public WavefrontModel create() {
WavefrontModel wavefront = new WavefrontModel();
wavefront.loadObject(mContext, mRawResourceId);
if (mOptimizeModel) {
wavefront.mergeStrips();
}
wavefront.mTextures = mTextures;
return wavefront;
}
}
private static class IndexInfo {
int mVertexIndex;
int mUVIndex;
int mNormalIndex;
static IndexInfo create(Integer vertexIndex, Integer uvIndex, Integer normalIndex) {
if (vertexIndex == null) {
return null;
}
IndexInfo indexInfo = new IndexInfo();
indexInfo.mVertexIndex = vertexIndex - 1;
indexInfo.mUVIndex = (uvIndex != null) ? uvIndex - 1 : 0;
indexInfo.mNormalIndex = (normalIndex != null) ? normalIndex - 1 : 0;
return indexInfo;
}
}
private static class Strip {
String mTextureName;
ArrayList<IndexInfo> mIndexes = new ArrayList<>();
Strip(String textureName) {
mTextureName = textureName;
}
void addIndex(IndexInfo indexInfo) {
mIndexes.add(indexInfo);
}
void addAll(ArrayList<IndexInfo> indexes) {
mIndexes.addAll(indexes);
}
}
private class Vertex {
float mX;
float mY;
float mZ;
float mR, mG, mB;
boolean mHasColors = false;
Vertex(float x, float y, float z) {
mX = x;
mY = y;
mZ = z;
}
void setColors(float r, float g, float b) {
mHasColors = true;
mR = r;
mG = g;
mB = b;
}
}
private class Normal {
float mX;
float mY;
float mZ;
Normal(float x, float y, float z) {
mX = x;
mY = y;
mZ = z;
}
}
private class UV {
float mU;
float mV;
UV(float u, float v) {
mU = u;
mV = v;
}
}
private ArrayList<Vertex> mVertex = new ArrayList<>();
private ArrayList<UV> mUVs = new ArrayList<>();
private ArrayList<Normal> mNormals = new ArrayList<>();
private ArrayList<Strip> mStrips = new ArrayList<>();
private HashMap<String, Texture> mTextures = new HashMap<>();
private WavefrontModel() {
}
private void loadObject(Context context, int rawResId) throws RuntimeException {
InputStream inputStream = context.getResources().openRawResource(rawResId);
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
String line;
try {
String stripTextureName = "";
int lineNumber = 0;
while ((line = bufferedReader.readLine()) != null) {
++lineNumber;
String[] elements = line.split("\\s+");
switch (elements[0]) {
case "v": // VERTEX
if (elements.length > 3) {
Float x = fromString(elements[1]);
Float y = fromString(elements[2]);
Float z = fromString(elements[3]);
if ((x != null) && (y != null) && (z != null)) {
Vertex vertex = new Vertex(x, y, z);
mVertex.add(vertex);
if (elements.length > 6) { // get vertex colors
Float r = fromString(elements[4]);
Float g = fromString(elements[5]);
Float b = fromString(elements[6]);
if ( (r != null) && (g != null) && (b != null) &&
(r >= 0) && (r <= 1) && (g >= 0) && (g <=1) && (b >= 0) && (b <= 1)) {
vertex.setColors(r, g, b);
break;
} else {
throw new RuntimeException("Vertex Colors error line " + lineNumber);
}
}
break;
}
}
throw new RuntimeException("Vertex error line " + lineNumber);
case "vt": // TEXTURE MAPPING
if (elements.length > 2) {
Float u = fromString(elements[1]);
Float v = fromString(elements[2]);
if ((u != null) && (v != null)) {
UV uv = new UV(u, v);
mUVs.add(uv);
break;
}
}
throw new RuntimeException("Texture Mapping error line " + lineNumber);
case "vn": // NORMAL
if (elements.length > 3) {
Float x = fromString(elements[1]);
Float y = fromString(elements[2]);
Float z = fromString(elements[3]);
if ((x != null) && (y != null) && (z != null)) {
Normal normal = new Normal(x, y, z);
mNormals.add(normal);
break;
}
}
throw new RuntimeException("Normal error line " + lineNumber);
case "usemtl": // MATERIAL
if (elements.length > 1) {
stripTextureName = elements[1];
}
break;
case "f": // FACES
if (elements.length == 4) { // Triangle
addFaceStrips(lineNumber, stripTextureName, elements, 1, 3);
} else {
throw new RuntimeException("Only triangles supported, error line " + lineNumber);
}
break;
}
}
} catch (IOException e) {
e.printStackTrace();
}
Log.i("DONE", "DONE");
}
private void addFaceStrips(int lineNumber, String materialName, String[] elements, int offsetStart, int numberOfVertex) {
// TODO: check order, seem to be dependant on file loaded :/
// Maybe suggest to use a tool to re-export (http://meshlab.sourceforge.net/ ?)
int[] triangleIndex = {0, 2 , 1};
// int[] quadIndex = {0, 2, 1, 3};
// // TODO: convex polygon should be like 0,1,2 0,2,3 0,3,4 0,4,5 ...
// int[] order = (numberOfVertex == 3) ? triangleIndex : quadIndex;
ArrayList<IndexInfo> indexInfos = new ArrayList<>(numberOfVertex);
for(int i=0; i<numberOfVertex; ++i) {
int index = triangleIndex[i];
Integer vA = intPart(elements, offsetStart + index, 0); // Vertex A
Integer tA = intPart(elements, offsetStart + index, 1); // Texture UV A
Integer nA = intPart(elements, offsetStart + index ,2); // Normal A
IndexInfo indexA = IndexInfo.create(vA, tA, nA);
if (indexA == null) {
throw new RuntimeException("Face error line " + lineNumber);
}
indexInfos.add(indexA);
}
Strip strip = new Strip(materialName);
for(IndexInfo indexInfo : indexInfos) {
strip.addIndex(indexInfo);
}
mStrips.add(strip);
}
private Float fromString(String string) {
try {
return Float.parseFloat(string);
} catch (NumberFormatException exception) {
}
return null;
}
private Integer intPart(String[] elements, int elementNumber, int partNumber) {
String element = elements[elementNumber];
return intPart(element, partNumber);
}
private Integer intPart(String string, int part) {
String[] parts = string.split("/");
if ((parts != null) && (parts.length > part)) {
String firstString = parts[part];
try {
return Integer.parseInt(firstString);
} catch (NumberFormatException exception) {
}
}
return null;
}
// Groups Strips of same material into 1 big strip (1 triangle strip per face)
private void mergeStrips() {
for(int iStrip = 0; iStrip<mStrips.size() - 1; ++iStrip) {
Strip origin = mStrips.get(iStrip);
String originTexture = origin.mTextureName;
for(int iOther = iStrip+1; iOther<mStrips.size(); ++iOther) {
Strip other = mStrips.get(iOther);
if (originTexture.equals(other.mTextureName)) {
IndexInfo originLastIndex = origin.mIndexes.get(origin.mIndexes.size() - 1);
IndexInfo otherFirstIndex = other.mIndexes.get(0);
origin.addIndex(originLastIndex);
origin.addIndex(originLastIndex);
origin.addIndex(otherFirstIndex);
origin.addAll(other.mIndexes);
mStrips.remove(iOther);
--iOther;
}
}
}
}
/**
* Converts the model to an Object3D
* @return an Object3D
*/
public Object3D toObject3D() {
final boolean hasUV = (mUVs.size() > 0);
final boolean hasNormals = (mNormals.size() > 0);
Object3D object3D = new Object3D();
for(Strip strip : mStrips) {
Face3D face3D = new Face3D();
int nbIndex = strip.mIndexes.size();
VertexList vertexList = new VertexList();
vertexList.init(nbIndex);
UVList uvList = null;
ColorList colorList = null;
if (hasUV) {
uvList = new UVList();
uvList.init(nbIndex);
} else {
colorList = new ColorList();
colorList.init(nbIndex);
}
NormalList normalList = null;
if (hasNormals) {
normalList = new NormalList();
normalList.init(nbIndex);
}
for(IndexInfo indexInfo : strip.mIndexes) {
int vertexIndex = indexInfo.mVertexIndex;
Vertex vertex = mVertex.get(vertexIndex);
vertexList.add(vertex.mX, vertex.mY, vertex.mZ);
if (hasUV) {
int uvIndex = indexInfo.mUVIndex;
UV uv = mUVs.get(uvIndex);
uvList.add(uv.mU, uv.mV);
} else if (vertex.mHasColors) {
colorList.add(vertex.mR, vertex.mG, vertex.mB, 1); // TODO: find a way to change the alpha channel?
} else {
throw new RuntimeException("Model must have texture UVs or vertex Colors");
}
if (hasNormals) {
int normalIndex = indexInfo.mNormalIndex;
Normal normal = mNormals.get(normalIndex);
normalList.add(normal.mX, normal.mY, normal.mZ);
}
}
vertexList.finalizeBuffer();
face3D.setVertexList(vertexList);
if (hasUV) {
uvList.finalizeBuffer();
face3D.setUVList(uvList);
Texture texture = mTextures.get(strip.mTextureName);
face3D.setTexture(texture);
} else {
colorList.finalizeBuffer();
face3D.setColorList(colorList);
}
if (hasNormals) {
normalList.finalizeBuffer();
face3D.setNormalList(normalList);
}
object3D.addFace(face3D);
}
return object3D;
}
}
| smartgl/src/main/java/fr/arnaudguyon/smartgl/tools/WavefrontModel.java | /*
Copyright 2016 Arnaud Guyon
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fr.arnaudguyon.smartgl.tools;
import android.content.Context;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import fr.arnaudguyon.smartgl.opengl.ColorList;
import fr.arnaudguyon.smartgl.opengl.Face3D;
import fr.arnaudguyon.smartgl.opengl.NormalList;
import fr.arnaudguyon.smartgl.opengl.Object3D;
import fr.arnaudguyon.smartgl.opengl.Texture;
import fr.arnaudguyon.smartgl.opengl.UVList;
import fr.arnaudguyon.smartgl.opengl.VertexList;
/**
* Created by aguyon on 21.11.16.
* Helper to load Wavefront objects and convert them to Object3D
*/
public class WavefrontModel {
private static final String TAG = "WavefrontModel";
public static class Builder {
Context mContext;
int mRawResourceId;
boolean mOptimizeModel = true;
HashMap<String, Texture> mTextures = new HashMap<>();
public Builder(Context context, int rawFileResourceId) {
mContext = context;
mRawResourceId = rawFileResourceId;
}
public Builder optimize(boolean optimizeModel) {
mOptimizeModel = optimizeModel;
return this;
}
public Builder addTexture(String textureName, Texture texture) {
mTextures.put(textureName, texture);
return this;
}
public WavefrontModel create() {
WavefrontModel wavefront = new WavefrontModel();
wavefront.loadObject(mContext, mRawResourceId);
if (mOptimizeModel) {
wavefront.mergeStrips();
}
wavefront.mTextures = mTextures;
return wavefront;
}
}
private static class IndexInfo {
int mVertexIndex;
int mUVIndex;
int mNormalIndex;
static IndexInfo create(Integer vertexIndex, Integer uvIndex, Integer normalIndex) {
if (vertexIndex == null) {
return null;
}
IndexInfo indexInfo = new IndexInfo();
indexInfo.mVertexIndex = vertexIndex - 1;
indexInfo.mUVIndex = (uvIndex != null) ? uvIndex - 1 : 0;
indexInfo.mNormalIndex = (normalIndex != null) ? normalIndex - 1 : 0;
return indexInfo;
}
}
private static class Strip {
String mTextureName;
ArrayList<IndexInfo> mIndexes = new ArrayList<>();
Strip(String textureName) {
mTextureName = textureName;
}
void addIndex(IndexInfo indexInfo) {
mIndexes.add(indexInfo);
}
void addAll(ArrayList<IndexInfo> indexes) {
mIndexes.addAll(indexes);
}
}
private class Vertex {
float mX;
float mY;
float mZ;
float mR, mG, mB;
boolean mHasColors = false;
Vertex(float x, float y, float z) {
mX = x;
mY = y;
mZ = z;
}
void setColors(float r, float g, float b) {
mHasColors = true;
mR = r;
mG = g;
mB = b;
}
}
private class Normal {
float mX;
float mY;
float mZ;
Normal(float x, float y, float z) {
mX = x;
mY = y;
mZ = z;
}
}
private class UV {
float mU;
float mV;
UV(float u, float v) {
mU = u;
mV = v;
}
}
private ArrayList<Vertex> mVertex = new ArrayList<>();
private ArrayList<UV> mUVs = new ArrayList<>();
private ArrayList<Normal> mNormals = new ArrayList<>();
private ArrayList<Strip> mStrips = new ArrayList<>();
private HashMap<String, Texture> mTextures = new HashMap<>();
private WavefrontModel() {
}
private void loadObject(Context context, int rawResId) throws RuntimeException {
InputStream inputStream = context.getResources().openRawResource(rawResId);
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
String line;
try {
String stripTextureName = "";
int lineNumber = 0;
while ((line = bufferedReader.readLine()) != null) {
++lineNumber;
line = line.replace(" ", " ");
String[] elements = line.split(" ");
switch (elements[0]) {
case "v": // VERTEX
if (elements.length > 3) {
Float x = fromString(elements[1]);
Float y = fromString(elements[2]);
Float z = fromString(elements[3]);
if ((x != null) && (y != null) && (z != null)) {
Vertex vertex = new Vertex(x, y, z);
mVertex.add(vertex);
if (elements.length > 6) { // get vertex colors
Float r = fromString(elements[4]);
Float g = fromString(elements[5]);
Float b = fromString(elements[6]);
if ( (r != null) && (g != null) && (b != null) &&
(r >= 0) && (r <= 1) && (g >= 0) && (g <=1) && (b >= 0) && (b <= 1)) {
vertex.setColors(r, g, b);
break;
} else {
throw new RuntimeException("Vertex Colors error line " + lineNumber);
}
}
break;
}
}
throw new RuntimeException("Vertex error line " + lineNumber);
case "vt": // TEXTURE MAPPING
if (elements.length > 2) {
Float u = fromString(elements[1]);
Float v = fromString(elements[2]);
if ((u != null) && (v != null)) {
UV uv = new UV(u, v);
mUVs.add(uv);
break;
}
}
throw new RuntimeException("Texture Mapping error line " + lineNumber);
case "vn": // NORMAL
if (elements.length > 3) {
Float x = fromString(elements[1]);
Float y = fromString(elements[2]);
Float z = fromString(elements[3]);
if ((x != null) && (y != null) && (z != null)) {
Normal normal = new Normal(x, y, z);
mNormals.add(normal);
break;
}
}
throw new RuntimeException("Normal error line " + lineNumber);
case "usemtl": // MATERIAL
if (elements.length > 1) {
stripTextureName = elements[1];
}
break;
case "f": // FACES
if (elements.length == 4) { // Triangle
addFaceStrips(lineNumber, stripTextureName, elements, 1, 3);
} else {
throw new RuntimeException("Only triangles supported, error line " + lineNumber);
}
break;
}
}
} catch (IOException e) {
e.printStackTrace();
}
Log.i("DONE", "DONE");
}
private void addFaceStrips(int lineNumber, String materialName, String[] elements, int offsetStart, int numberOfVertex) {
// TODO: check order, seem to be dependant on file loaded :/
// Maybe suggest to use a tool to re-export (http://meshlab.sourceforge.net/ ?)
int[] triangleIndex = {0, 2 , 1};
// int[] quadIndex = {0, 2, 1, 3};
// // TODO: convex polygon should be like 0,1,2 0,2,3 0,3,4 0,4,5 ...
// int[] order = (numberOfVertex == 3) ? triangleIndex : quadIndex;
ArrayList<IndexInfo> indexInfos = new ArrayList<>(numberOfVertex);
for(int i=0; i<numberOfVertex; ++i) {
int index = triangleIndex[i];
Integer vA = intPart(elements, offsetStart + index, 0); // Vertex A
Integer tA = intPart(elements, offsetStart + index, 1); // Texture UV A
Integer nA = intPart(elements, offsetStart + index ,2); // Normal A
IndexInfo indexA = IndexInfo.create(vA, tA, nA);
if (indexA == null) {
throw new RuntimeException("Face error line " + lineNumber);
}
indexInfos.add(indexA);
}
Strip strip = new Strip(materialName);
for(IndexInfo indexInfo : indexInfos) {
strip.addIndex(indexInfo);
}
mStrips.add(strip);
}
private Float fromString(String string) {
try {
return Float.parseFloat(string);
} catch (NumberFormatException exception) {
}
return null;
}
private Integer intPart(String[] elements, int elementNumber, int partNumber) {
String element = elements[elementNumber];
return intPart(element, partNumber);
}
private Integer intPart(String string, int part) {
String[] parts = string.split("/");
if ((parts != null) && (parts.length > part)) {
String firstString = parts[part];
try {
return Integer.parseInt(firstString);
} catch (NumberFormatException exception) {
}
}
return null;
}
// Groups Strips of same material into 1 big strip (1 triangle strip per face)
private void mergeStrips() {
for(int iStrip = 0; iStrip<mStrips.size() - 1; ++iStrip) {
Strip origin = mStrips.get(iStrip);
String originTexture = origin.mTextureName;
for(int iOther = iStrip+1; iOther<mStrips.size(); ++iOther) {
Strip other = mStrips.get(iOther);
if (originTexture.equals(other.mTextureName)) {
IndexInfo originLastIndex = origin.mIndexes.get(origin.mIndexes.size() - 1);
IndexInfo otherFirstIndex = other.mIndexes.get(0);
origin.addIndex(originLastIndex);
origin.addIndex(originLastIndex);
origin.addIndex(otherFirstIndex);
origin.addAll(other.mIndexes);
mStrips.remove(iOther);
--iOther;
}
}
}
}
/**
* Converts the model to an Object3D
* @return an Object3D
*/
public Object3D toObject3D() {
final boolean hasUV = (mUVs.size() > 0);
final boolean hasNormals = (mNormals.size() > 0);
Object3D object3D = new Object3D();
for(Strip strip : mStrips) {
Face3D face3D = new Face3D();
int nbIndex = strip.mIndexes.size();
VertexList vertexList = new VertexList();
vertexList.init(nbIndex);
UVList uvList = null;
ColorList colorList = null;
if (hasUV) {
uvList = new UVList();
uvList.init(nbIndex);
} else {
colorList = new ColorList();
colorList.init(nbIndex);
}
NormalList normalList = null;
if (hasNormals) {
normalList = new NormalList();
normalList.init(nbIndex);
}
for(IndexInfo indexInfo : strip.mIndexes) {
int vertexIndex = indexInfo.mVertexIndex;
Vertex vertex = mVertex.get(vertexIndex);
vertexList.add(vertex.mX, vertex.mY, vertex.mZ);
if (hasUV) {
int uvIndex = indexInfo.mUVIndex;
UV uv = mUVs.get(uvIndex);
uvList.add(uv.mU, uv.mV);
} else if (vertex.mHasColors) {
colorList.add(vertex.mR, vertex.mG, vertex.mB, 1); // TODO: find a way to change the alpha channel?
} else {
throw new RuntimeException("Model must have texture UVs or vertex Colors");
}
if (hasNormals) {
int normalIndex = indexInfo.mNormalIndex;
Normal normal = mNormals.get(normalIndex);
normalList.add(normal.mX, normal.mY, normal.mZ);
}
}
vertexList.finalizeBuffer();
face3D.setVertexList(vertexList);
if (hasUV) {
uvList.finalizeBuffer();
face3D.setUVList(uvList);
Texture texture = mTextures.get(strip.mTextureName);
face3D.setTexture(texture);
} else {
colorList.finalizeBuffer();
face3D.setColorList(colorList);
}
if (hasNormals) {
normalList.finalizeBuffer();
face3D.setNormalList(normalList);
}
object3D.addFace(face3D);
}
return object3D;
}
}
| Improvement to allow multiple spaces in wavefront format
| smartgl/src/main/java/fr/arnaudguyon/smartgl/tools/WavefrontModel.java | Improvement to allow multiple spaces in wavefront format | <ide><path>martgl/src/main/java/fr/arnaudguyon/smartgl/tools/WavefrontModel.java
<ide> int lineNumber = 0;
<ide> while ((line = bufferedReader.readLine()) != null) {
<ide> ++lineNumber;
<del> line = line.replace(" ", " ");
<del> String[] elements = line.split(" ");
<add> String[] elements = line.split("\\s+");
<ide> switch (elements[0]) {
<ide> case "v": // VERTEX
<ide> if (elements.length > 3) { |
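
The change shown in this diff is small but worth spelling out: the deleted replace()/split(" ") pair tokenized each OBJ line on single spaces, so lines padded with repeated spaces or tabs produced empty tokens and misaligned fields, while the added split("\\s+") treats any run of whitespace as one separator. A minimal, hypothetical Java sketch (a standalone demo, not part of the smartGL sources) of the difference:

import java.util.Arrays;

// Illustrative only: compares the old and new tokenization of a Wavefront vertex line.
public class WavefrontSplitDemo {
    public static void main(String[] args) {
        // A vertex line padded with runs of spaces, as some exporters emit.
        String line = "v  1.0   2.0 3.0";

        // Old approach: splitting on a single space keeps empty tokens wherever
        // two or more spaces occur, shifting the x/y/z fields out of place.
        System.out.println(Arrays.toString(line.split(" ")));
        // -> [v, , 1.0, , , 2.0, 3.0]

        // New approach: the regex \s+ collapses any run of spaces or tabs,
        // so elements[1]..elements[3] always hold the three coordinates.
        System.out.println(Arrays.toString(line.split("\\s+")));
        // -> [v, 1.0, 2.0, 3.0]
    }
}

Either call would still yield an empty first token if the line began with whitespace; the sample line above starts directly with the "v" keyword, matching what loadObject switches on.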
|
Java | mit | eac24918f2edb1803fd283ce98a14a01a971e033 | 0 | wizzardo/jrtorrent,wizzardo/jrtorrent | package com.wizzardo.jrt;
import com.wizzardo.http.FileTreeHandler;
import com.wizzardo.http.MultipartHandler;
import com.wizzardo.http.RestHandler;
import com.wizzardo.http.filter.GzipFilter;
import com.wizzardo.http.framework.ControllerHandler;
import com.wizzardo.http.framework.WebApplication;
import com.wizzardo.http.framework.di.DependencyFactory;
import com.wizzardo.http.framework.di.SingletonDependency;
import com.wizzardo.http.framework.message.MessageBundle;
import com.wizzardo.jmx.GcStatsRegistrar;
/**
* Created by wizzardo on 07.12.15.
*/
public class App {
final WebApplication server;
public App(String[] args) {
server = new WebApplication(args);
server.onSetup(app -> {
DependencyFactory.get(MessageBundle.class).load("messages");
// DependencyFactory.get().register(RTorrentService.class, new SingletonDependency<>(MockRTorrentService.class));
String downloads = app.getConfig().config("jrt").get("downloads", "./");
app.getUrlMapping()
.append("/", AppController.class, "index")
.append("/addTorrent", new MultipartHandler(new ControllerHandler<>(AppController.class, "addTorrent")))
.append("/downloads/*", new RestHandler("downloads")
.get(new FileTreeHandler(downloads, "/downloads")
.setShowFolder(false)))
.append("/ws", "ws", AppWebSocketHandler.class)
.append("/tags.js", AppController.class, "tags")
;
app.getFiltersMapping()
.addAfter("/tags.js", new GzipFilter())
;
});
server.start();
GcStatsRegistrar.registerBeans();
}
public static void main(String[] args) {
new App(args);
}
}
| src/main/java/com/wizzardo/jrt/App.java | package com.wizzardo.jrt;
import com.wizzardo.http.FileTreeHandler;
import com.wizzardo.http.MultipartHandler;
import com.wizzardo.http.RestHandler;
import com.wizzardo.http.filter.GzipFilter;
import com.wizzardo.http.framework.ControllerHandler;
import com.wizzardo.http.framework.Environment;
import com.wizzardo.http.framework.WebApplication;
import com.wizzardo.http.framework.di.DependencyFactory;
import com.wizzardo.http.framework.di.SingletonDependency;
import com.wizzardo.http.framework.message.MessageBundle;
import com.wizzardo.jmx.GcStatsRegistrar;
/**
* Created by wizzardo on 07.12.15.
*/
public class App {
final WebApplication server;
public App(Environment environment) {
server = new WebApplication();
server.onSetup(app -> {
DependencyFactory.getDependency(MessageBundle.class).load("messages");
// DependencyFactory.get().register(RTorrentService.class, new SingletonDependency<>(MockRTorrentService.class));
String downloads = app.getConfig().config("jrt").get("downloads", "./");
app.getUrlMapping()
.append("/", AppController.class, "index")
.append("/addTorrent", new MultipartHandler(new ControllerHandler<>(AppController.class, "addTorrent")))
.append("/downloads/*", new RestHandler("downloads").get(new FileTreeHandler(downloads, "/downloads")
.setShowFolder(false)))
.append("/ws", "ws", DependencyFactory.getDependency(AppWebSocketHandler.class))
.append("/tags.js", AppController.class, "tags")
;
app.getFiltersMapping()
.addAfter("/tags.js", new GzipFilter())
;
});
server.setEnvironment(environment);
server.start();
GcStatsRegistrar.registerBeans();
}
public static void main(String[] args) {
Environment environment = args.length == 1 && args[0].startsWith("-env=") ? Environment.parse(args[0].substring(5)) : Environment.DEVELOPMENT;
new App(environment);
}
}
| update
| src/main/java/com/wizzardo/jrt/App.java | update | <ide><path>rc/main/java/com/wizzardo/jrt/App.java
<ide> import com.wizzardo.http.RestHandler;
<ide> import com.wizzardo.http.filter.GzipFilter;
<ide> import com.wizzardo.http.framework.ControllerHandler;
<del>import com.wizzardo.http.framework.Environment;
<ide> import com.wizzardo.http.framework.WebApplication;
<ide> import com.wizzardo.http.framework.di.DependencyFactory;
<ide> import com.wizzardo.http.framework.di.SingletonDependency;
<ide> public class App {
<ide> final WebApplication server;
<ide>
<del> public App(Environment environment) {
<del> server = new WebApplication();
<add> public App(String[] args) {
<add> server = new WebApplication(args);
<ide> server.onSetup(app -> {
<del> DependencyFactory.getDependency(MessageBundle.class).load("messages");
<add> DependencyFactory.get(MessageBundle.class).load("messages");
<ide> // DependencyFactory.get().register(RTorrentService.class, new SingletonDependency<>(MockRTorrentService.class));
<ide>
<ide> String downloads = app.getConfig().config("jrt").get("downloads", "./");
<ide> app.getUrlMapping()
<ide> .append("/", AppController.class, "index")
<ide> .append("/addTorrent", new MultipartHandler(new ControllerHandler<>(AppController.class, "addTorrent")))
<del> .append("/downloads/*", new RestHandler("downloads").get(new FileTreeHandler(downloads, "/downloads")
<del> .setShowFolder(false)))
<del> .append("/ws", "ws", DependencyFactory.getDependency(AppWebSocketHandler.class))
<add> .append("/downloads/*", new RestHandler("downloads")
<add> .get(new FileTreeHandler(downloads, "/downloads")
<add> .setShowFolder(false)))
<add> .append("/ws", "ws", AppWebSocketHandler.class)
<ide> .append("/tags.js", AppController.class, "tags")
<ide> ;
<ide> app.getFiltersMapping()
<ide> .addAfter("/tags.js", new GzipFilter())
<ide> ;
<ide> });
<del> server.setEnvironment(environment);
<ide> server.start();
<ide> GcStatsRegistrar.registerBeans();
<ide> }
<ide>
<ide> public static void main(String[] args) {
<del> Environment environment = args.length == 1 && args[0].startsWith("-env=") ? Environment.parse(args[0].substring(5)) : Environment.DEVELOPMENT;
<del> new App(environment);
<add> new App(args);
<ide> }
<ide> } |
|
JavaScript | mit | e22d6a021bf75ce4692c1f3247d40bf9b1508c45 | 0 | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | /**
* This function gives the user an appropriate error message if applicable.
* @returns {void}
*/
function checkForErrors () {
// Set Error messages
const errors = []
const manageUrl = $('#manage-url').data().url
const uploadUrl = $('#upload-url').data().url
const noActiveDocsMsg = `You have no active documents.
Please activate at least one document using the <a href=${manageUrl}>Manage</a> tool
or <a href=${uploadUrl}>upload</a> a new document.`
const noStringMsg = 'You must provide a string to cut on.'
const noCutValMsg = 'You must provide a default cutting value.'
const invalidSegSizeMsg = 'Default cutting: Invalid segment size.'
const invalidOverlapValMsg = 'Default cutting: Invalid overlap value.'
const indivInvalidSegSizeMsg = 'Individual cutting: Invalid segment size.'
const indivInvalidOverlapVal = 'Individual cutting: Invalid overlap value.'
// Confirm that there are active files
if ($('#num_active_files').val() === '0') {
errors.push(noActiveDocsMsg)
}
// If cut by milestone is checked make sure there is a milestone value
if ($('#cutByMS').is(':checked')) {
if ($('#MScutWord').val() === '') {
errors.push(noStringMsg)
}
} else {
// Make sure there is a default cutting value
const overallCutVal = $('#overallcutvalue')
const indivCutVal = $('#individualCutValue')
if (overallCutVal.val() === '') {
errors.push(noCutValMsg)
} else {
const overallcutvalueStr = overallCutVal.val()
const overallcutvalue = parseInt(overallCutVal.val())
const overallOverlapValue = parseInt($('#overallOverlapValue').val())
const individualOverlap = parseInt($('#individualOverlap').val())
const individualCutValueStr = indivCutVal.val()
let individualCutValue = indivCutVal.val()
// Make sure the overall segment size is not negative
if (overallcutvalue !== Math.floor(overallcutvalue)) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall segment size is not a decimal
if (overallcutvalueStr !== Math.abs(overallcutvalue).toString()) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall segment size is not 0
if (overallcutvalue === 0) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall overlap is valid
if ((overallcutvalue <= overallOverlapValue) || (Math.abs(Math.round(overallOverlapValue)) !== overallOverlapValue)) {
errors.push(invalidOverlapValMsg)
}
// If there are individual segment cuts
if (individualCutValue !== '') {
individualCutValue = parseInt(individualCutValue)
// Make sure the individual segment size is not negative
if (individualCutValue !== Math.floor(individualCutValue)) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual segment size is not a decimal
if (individualCutValueStr !== Math.abs(individualCutValue).toString()) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual segment size is not 0
if (individualCutValue === 0) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual overlap is valid
if ((individualCutValue <= individualOverlap) || (Math.abs(Math.round(individualOverlap)) !== individualOverlap)) {
errors.push(indivInvalidOverlapVal)
}
}
}
}
if (errors.length > 0) {
$('#hasErrors').val('true')
$('#status-prepare').css({'visibility': 'hidden'})
$('#error-modal-message').html(errors[0])
$('#error-modal').modal()
} else {
$('#hasErrors').val('false')
}
}
/**
 * Checks whether the current cut settings warrant a warning to the user.
* @returns {void}
*/
function checkForWarnings () {
// load numWord from metadata
const numWordLoad = $('#num-word').data()
// Access array within object
const numWord = numWordLoad.numword
let needsWarning = false
const maxSegs = 100
const defCutTypeValue = $('input[name=\'cutType\']:checked').val() // Cut Type
const cutVal = parseInt($('input[name=\'cutValue\']').val()) // Segment Size
const overVal = parseInt($('#overallOverlapValue').val()) // Overlap Size
const indivDivs = $('.cuttingoptionswrapper.ind') // All individual cutsets
const eltsWithoutIndividualOpts = [] // Elements without individual cutsets
// Check each individual cutset
indivDivs.each(function () {
let thisCutVal = $('#individualCutValue', this).val() // Individual segment size
let thisOverVal = $('#individualOverlap', this).val() // Individual overlap size
// Parse as integers
if (thisCutVal !== '') {
thisCutVal = parseInt(thisCutVal)
thisOverVal = parseInt(thisOverVal)
}
// Get a list of each of the cutset indices
const listindex = indivDivs.index(this)
const currID = activeFileIDs[listindex] // activeFileIDs is defined in the template file
const isCutByMS = $('.indivMS', this).is(':checked') // True if cut by milestone checked
// If not cut by milestone and no segment size, add to no individual cutsets array
if (!isCutByMS && thisCutVal === '') {
eltsWithoutIndividualOpts.push(listindex)
}
// If no segment size
if (thisCutVal !== '') {
// Get segment cut type
const thisCutType = $(`input[name='cutType_${currID}']:checked`).val()
// If not cut by milestone, use num_ variables set in template file
if (!(isCutByMS)) {
// If the number of characters-overlap size/segment size-overlap size > 100
if (thisCutType === 'letters' && (numChar[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
needsWarning = true
// Same for segments and lines
} else if (thisCutType === 'words' && (numWord[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
console.log(numWord)
needsWarning = true
} else if (thisCutType === 'lines' && (numLine[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
needsWarning = true
// Or if the segment size > 100
} else if (thisCutVal > maxSegs && eltsWithoutIndividualOpts.length > 0) {
needsWarning = true
}
}
}
})
// If cut by milestone is checked
if ($('input[name=\'cutByMS\']:checked').length === 0) {
// For cutting by characters
if (defCutTypeValue === 'letters') {
// Check each document without individual options
eltsWithoutIndividualOpts.forEach(function (elt) {
// If the number of characters-segment size/segment size-overlap size > 100
if ((numChar[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
// Do the same with words and lines
} else if (defCutTypeValue === 'words') {
eltsWithoutIndividualOpts.forEach(function (elt) {
if ((numWord[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
} else if (defCutTypeValue === 'lines') {
eltsWithoutIndividualOpts.forEach(function (elt) {
if ((numLine[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
// If the segment size > 100 and there are documents without individual options
} else if (cutVal > maxSegs && eltsWithoutIndividualOpts.length > 0) {
needsWarning = true
}
}
if (needsWarning === true) {
$('#needsWarning').val('true')
const sizeWarning = `Current cut settings will result in over
100 new segments. Please be patient if you continue.`
const footerButtons = `<button type="button" class="btn btn-default" id="warningContinue">Continue Anyway</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>`
$('#warning-modal-footer').html(footerButtons)
$('#warning-modal-message').html(sizeWarning)
// Hide the processing icon and show the modal
$('#status-prepare').css({'visibility': 'hidden'})
$('#warning-modal').modal()
} else {
$('#needsWarning').val('false')
}
}
let xhr
/**
* Performs the ajax request.
* @param {string} action - the action type being requested.
* @returns {void}
*/
function doAjax (action) {
/* It's not really efficient to create a FormData and a json object,
but the former is easier to pass to lexos.py functions, and the
latter is easier for the ajax response to use. */
const numActiveFiles = $('#numActiveFiles').val()
const formData = new FormData($('form')[0])
formData.append('action', action)
const jsonForm = jsonifyForm()
$.extend(jsonForm, {'action': action})
// Initiate a timer to allow user to cancel if processing takes too long
const loadingTimeout = window.setTimeout(function () {
$('#needsWarning').val('true')
const timeWarning = `Lexos seems to be taking a long time. \
This may be because you are cutting a large number of documents.
If not, we suggest that you cancel, reload the page, and try again.`
const footerButtons = `<button type="button" class="btn btn-default" data-dismiss="modal">Continue Anyway</button>
<button type="button" class="btn btn-default" id="timerCancel" >Cancel</button>`
$('#warning-modal-footer').html(footerButtons)
$('#warning-modal-message').html(timeWarning)
$('#warning-modal').modal()
}, 10000) // 10 seconds
xhr = $.ajax({
url: '/doCutting',
type: 'POST',
processData: false, // important
contentType: false,
data: formData,
error: function (jqXHR, textStatus, errorThrown) {
$('#status-prepare').css({'visibility': 'hidden'})
// Show an error if the user has not cancelled the action
if (errorThrown !== 'abort') {
const notApplyMsg = 'Lexos could not apply the cutting actions.'
$('#error-modal-message').html(notApplyMsg)
$('#error-modal').modal()
}
console.log(`bad: ${textStatus}: ${errorThrown}`)
}
}).done(function (response) {
clearTimeout(loadingTimeout)
$('#warning-modal').modal('hide') // Hide the warning if it is displayed
response = JSON.parse(response)
$('#preview-body').empty() // Correct
$.each(response['data'], function () {
const fileID = $(this)[0]
const fileName = $(this)[1]
const fileLabel = fileName
const fileContents = $(this)[3]
const indivCutButtons = `<a id="indivCutButtons_${fileID}" onclick="toggleIndivCutOptions(${fileID});" class="bttn indivCutButtons" role="button">Individual Options</a></legend>`
// CSS truncates the document label
const fieldSet = $(`<fieldSet class="individualpreviewwrapper"><legend class="individualpreviewlegend has-tooltip" style="color:#999; width:90%;margin: auto; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;">${fileLabel} ${indivCutButtons}</fieldSet>`)
const indCutOptsWrap = `<div id="indCutOptsWrap_${fileID}" class="cuttingoptionswrapper ind hidden">\
<fieldSet class="cuttingoptionsfieldSet">\
<legend class="individualcuttingoptionstitle">Individual Cutting Options</legend>\
<div class="cuttingdiv individcut">\
<div class="row">\
<div class="col-md-5">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndLetters_${fileID}" value="letters"/>\
Characters/Segment</label>\
</div>\
<div class="col-md-7">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndWords_${fileID}" value="words"/>\
Tokens/Segment</label>\
</div>\
</div>\
<div class="row cutting-radio">\
<div class="col-md-5">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndLines_${fileID}" value="lines"/>\
Lines/Segment</label>\
</div>\
<div class="col-md-7">\
<label class="radio numberradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndNumber_${fileID}" value="number"/>\
Segments/Document</label>\
</div>\
</div>\
</div>\
<div class="row">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:3%;">\
<label>\
<span id="numOf${fileID}" class="cut-label-text">Number of Segments:</span>\
<input type="number" min="1" step="1" name="cutValue_${fileID}" class="cut-text-input" id="individualCutValue" value=""/>\
</label>\
</div>\
</div>\
<div class="row overlap-div">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:3%;">\
<label>Overlap: \
<input type="number" min="0" name="cutOverlap_${fileID}" class="cut-text-input overlap-input" id="individualOverlap" value="0"/>\
</label>\
</div>\
</div>\
<div id="lastprop-div_${fileID}" class="row lastprop-div">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:1%;">\
<label>Last Proportion Threshold: \
<input type="number" min="0" id="cutLastProp_${fileID}" name="cutLastProp_${fileID}" class="cut-text-input lastprop-input" value="50" style="width:54px;margin-right:3px;"/>\
%</label>\
</div>\
</div>\
<div class="row">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:1%;">\
<label>Cutset Label: \
<input type="text" name="cutsetnaming_${fileID}" class="cutsetnaming" value="${fileName}" style="width:155px;display:inline; margin: auto; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;"/>\
</label>\
</div>\
</div>\
<div class="row cuttingdiv" id="cutByMSdiv">\
<div class="col-sm-4">\
<label>\
<input type="checkbox" class="indivMS" name="cutByMS_${fileID}" id="cutByMS_${fileID}"/>\
Cut by Milestone</label>\
</div>\
<div class="col-sm-8 pull-right" id="MSoptspan" style="display:none;">\
<span>Cut document on this term \
<input type="text" class="indivMSinput" name="MScutWord_${fileID}" id="MScutWord${fileID}" value="" style="margin-left:3px;width:130px;"/>\
</span>\
</div>\
</div>\
</fieldSet>\
</div>`
fieldSet.append(indCutOptsWrap)
if ($.type(fileContents) === 'string') {
fieldSet.append(`<div class="filecontents">${fileContents}</div>`) // Keep this with no whitespace!
} else {
$.each(fileContents, function (i, segment) {
const segmentLabel = segment[0]
const segmentString = segment[1]
fieldSet.append(`<div class="filechunk"><span class="filechunklabel">${segmentLabel}</span><div>${segmentString}</div></div>`)
})
}
$('#preview-body').append(fieldSet)
// Hide the individual cutting wrapper if the form doesn't contain values for it
if (!(`cutType_${fileID}` in formData) && formData[`cutType_${fileID}`] !== '') {
$(`#indCutOptsWrap_${fileID}`).addClass('hidden')
}
// Check the cut type boxes
if (formData['cutTypeInd'] === 'letters') {
$(`#cutTypeIndLetters_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'words') {
$(`#cutTypeIndWords_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'lines') {
$(`#cutTypeIndLines_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'number') {
$(`#cutTypeIndNumber_${fileID}`).prop('checked', true)
$(`#numOf_${fileID}`).html('Number of Segments')
$('#lastprop-div').addClass('transparent')
$(`#cutLastProp_${fileID}`).prop('disabled', true)
}
if (formData['Overlap']) { $(`#cutOverlap_${fileID}`).val(formData['Overlap']) } else { $(`#cutOverlap_${fileID}`).val(0) }
if (formData[`cutLastProp_${fileID}`]) {
$(`#lastprop-div_${fileID}`).val(formData[`#cutLastProp_${fileID}`])
}
if (formData['cutType'] === 'milestone') {
$(`#cutTypeIndNumber_${fileID}`).prop('checked', true)
}
if (formData[`MScutWord_${fileID}`] === 'milestone') {
$(`#MScutWord${fileID}`).val(formData['cuttingoptions']['cutValue'])
}
})
$('.fa-folder-open-o').attr('data-original-title', `You have ${numActiveFiles} active document(s).`)
$('#status-prepare').css({'visibility': 'hidden'})
})
}
/**
* Checks the form data for errors and warnings.
* @param {string} action - the action type being requested.
* @returns {void}
*/
function process (action) {
$('#status-prepare').css({'visibility': 'visible', 'z-index': '400000'})
$('#formAction').val(action)
$.when(checkForErrors()).done(function () {
if ($('#hasErrors').val() === 'false') {
checkForWarnings()
$.when(checkForWarnings()).done(function () {
if ($('#needsWarning').val() === 'false') {
doAjax(action)
}
})
}
})
}
// Handle the Continue button in the warning modal
$(document).on('click', '#warningContinue', function () {
$('#needsWarning').val('false')
const action = $('#formAction').val()
$('#warning-modal').modal('hide')
doAjax(action)
$('#status-prepare').css({'visibility': 'visible', 'z-index': '400000'})
})
// Handle the Timer Cancel button in the warning modal
$(document).on('click', '#timerCancel', function () {
$('#needsWarning').val('false')
$('#hasErrors').val('false')
xhr.abort()
$('#warning-modal-footer').append('<button>Moo</button>')
$('#warning-modal').modal('hide')
$('#status-prepare').css('visibility', 'hidden')
})
/**
* Convert the form data into a JSON object.
 * @returns {Object} - the form data as a JSON object.
*/
function jsonifyForm () {
const form = {}
$.each($('form').serializeArray(), function (i, field) {
form[field.name] = field.value || ''
})
return form
}
/**
* Performs the download request (through flask) when download button clicked.
*/
$(document).on('click', '#downloadCutting', function () {
// Unfortunately, you can't trigger a download with an ajax request; calling a
// Flask route seems to be the easiest method.
if ($('#num_active_files').val() > '0') {
window.location = '/downloadCutting'
}
})
/**
* Toggles milestone options.
* @returns {void}
*/
function showMilestoneOptions () {
if ($('#cutByMS').is(':checked')) {
$('#MSoptspan').removeClass('hidden')
$('#cuttingdiv').hide()
} else {
$('#MSoptspan').addClass('hidden')
$('#cuttingdiv').show()
}
}
/**
* Document ready function.
*/
$(function () {
$('#actions').addClass('actions-cut')
// Toggle cutting options when radio buttons with different classes are clicked
const timeToToggle = 150
$('.sizeradio').click(function () {
const cuttingValueLabel = $(this).parents('.cuttingoptionswrapper').find('.cut-label-text')
cuttingValueLabel.text('Segment Size:')
$(this).parents('.cuttingoptionswrapper').find('.lastprop-div')
.animate({opacity: 1}, timeToToggle)
.find('.lastprop-input').prop('disabled', false)
$(this).parents('.cuttingoptionswrapper').find('.overlap-div')
.animate({opacity: 1}, timeToToggle)
.find('.overlap-input').prop('disabled', false)
})
$('.numberradio').click(function () {
const cuttingValueLabel = $(this).parents('.cuttingoptionswrapper').find('.cut-label-text')
cuttingValueLabel.text('Number of Segments:')
$(this).parents('.cuttingoptionswrapper').find('.lastprop-div')
.animate({opacity: 0.2}, timeToToggle)
.find('.lastprop-input').prop('disabled', true)
$(this).parents('.cuttingoptionswrapper').find('.overlap-div')
.animate({opacity: 0.2}, timeToToggle)
.find('.overlap-input').prop('disabled', true)
})
// Toggle individual cut option on load.
$('.indivCutButtons').click(function () {
const toggleDiv = $(this).closest('.individualpreviewwrapper').find('.cuttingoptionswrapper')
toggleDiv.toggleClass('hidden')
})
$('#cutByMS').click(showMilestoneOptions)
$(document).on('click', '.indivMS', function () {
showMilestoneOptions()
if ($(this).is(':checked')) {
$(this).parents('#cutByMSdiv').filter(':first').children('#MSoptspan').show()
$(this).parents('#cutByMSdiv').filter(':first')
.parents('.cuttingoptionswrapper').find('.individcut').hide()
} else {
$(this).parents('#cutByMSdiv').filter(':first').children('#MSoptspan').hide()
$(this).parents('#cutByMSdiv').filter(':first')
.parents('.cuttingoptionswrapper').find('.individcut').show()
}
})
})
| lexos/static/js/scripts_cut.js | /**
* This function gives the user an appropriate error message if applicable.
* @returns {void}
*/
function checkForErrors () {
// Set Error messages
const errors = []
const manageUrl = $('#manage-url').data().url
const uploadUrl = $('#upload-url').data().url
const noActiveDocsMsg = `You have no active documents.
Please activate at least one document using the <a href=${manageUrl}>Manage</a> tool
or <a href=${uploadUrl}>upload</a> a new document.`
const noStringMsg = 'You must provide a string to cut on.'
const noCutValMsg = 'You must provide a default cutting value.'
const invalidSegSizeMsg = 'Default cutting: Invalid segment size.'
const invalidOverlapValMsg = 'Default cutting: Invalid overlap value.'
const indivInvalidSegSizeMsg = 'Individual cutting: Invalid segment size.'
const indivInvalidOverlapVal = 'Individual cutting: Invalid overlap value.'
// Confirm that there are active files
if ($('#num_active_files').val() === '0') {
errors.push(noActiveDocsMsg)
}
// If cut by milestone is checked make sure there is a milestone value
if ($('#cutByMS').is(':checked')) {
if ($('#MScutWord').val() === '') {
errors.push(noStringMsg)
}
} else {
// Make sure there is a default cutting value
const overallCutVal = $('#overallcutvalue')
const indivCutVal = $('#individualCutValue')
if (overallCutVal.val() === '') {
errors.push(noCutValMsg)
} else {
const overallcutvalueStr = overallCutVal.val()
const overallcutvalue = parseInt(overallCutVal.val())
const overallOverlapValue = parseInt($('#overallOverlapValue').val())
const individualOverlap = parseInt($('#individualOverlap').val())
const individualCutValueStr = indivCutVal.val()
let individualCutValue = indivCutVal.val()
// Make sure the overall segment size not negative
if (overallcutvalue !== Math.floor(overallcutvalue)) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall segment size not a decimal
if (overallcutvalueStr !== Math.abs(overallcutvalue).toString()) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall segment size not 0
if (overallcutvalue === 0) {
errors.push(invalidSegSizeMsg)
}
// Make sure the overall overlap is valid
if ((overallcutvalue <= overallOverlapValue) || (Math.abs(Math.round(overallOverlapValue)) !== overallOverlapValue)) {
errors.push(invalidOverlapValMsg)
}
// If there are individual segment cuts
if (individualCutValue !== '') {
individualCutValue = parseInt(individualCutValue)
// Make sure the individual segment size not negative
if (individualCutValue !== Math.floor(individualCutValue)) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual segment size not a decimal
if (individualCutValueStr !== Math.abs(individualCutValue).toString()) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual segment size not 0
if (individualCutValue === 0) {
errors.push(indivInvalidSegSizeMsg)
}
// Make sure the individual overlap is valid
if ((individualCutValue <= individualOverlap) || (Math.abs(Math.round(individualOverlap)) !== individualOverlap)) {
errors.push(indivInvalidOverlapVal)
}
}
}
}
if (errors.length > 0) {
$('#hasErrors').val('true')
$('#status-prepare').css({'visibility': 'hidden'})
$('#error-modal-message').html(errors[0])
$('#error-modal').modal()
} else {
$('#hasErrors').val('false')
}
}
/**
* function to check whether the user needs a warning.
* @returns {void}
*/
function checkForWarnings () {
const numWordLoad = $('#num-word').data()
//console.log(numWord.numword[0])
const numWord = numWordLoad.numword[0]
console.log(numWord)
let needsWarning = false
const maxSegs = 100
const defCutTypeValue = $('input[name=\'cutType\']:checked').val() // Cut Type
const cutVal = parseInt($('input[name=\'cutValue\']').val()) // Segment Size
const overVal = parseInt($('#overallOverlapValue').val()) // Overlap Size
const indivDivs = $('.cuttingoptionswrapper.ind') // All individual cutsets
const eltsWithoutIndividualOpts = [] // Elements without individual cutsets
// Check each individual cutset
indivDivs.each(function () {
let thisCutVal = $('#individualCutValue', this).val() // Individual segment size
let thisOverVal = $('#individualOverlap', this).val() // Individual overlap size
// Parse as integers
if (thisCutVal !== '') {
thisCutVal = parseInt(thisCutVal)
thisOverVal = parseInt(thisOverVal)
}
// Get a list of each of the cutset indices
const listindex = indivDivs.index(this)
const currID = activeFileIDs[listindex] // activeFileIDs is defined in the template file
const isCutByMS = $('.indivMS', this).is(':checked') // True if cut by milestone checked
// If not cut by milestone and no segment size, add to no individual cutsets array
if (!isCutByMS && thisCutVal === '') {
eltsWithoutIndividualOpts.push(listindex)
}
// If no segment size
if (thisCutVal !== '') {
// Get segment cut type
const thisCutType = $(`input[name='cutType_${currID}']:checked`).val()
// If not cut by milestone, use num_ variables set in template file
if (!(isCutByMS)) {
// If the number of characters-overlap size/segment size-overlap size > 100
if (thisCutType === 'letters' && (numChar[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
needsWarning = true
// Same for segments and lines
}
else if (thisCutType === 'words' && (numWord - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
console.log(numWord)
needsWarning = true
} else if (thisCutType === 'lines' && (numLine[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
needsWarning = true
// Or if the segment size > 100
} else if (thisCutVal > maxSegs && eltsWithoutIndividualOpts.length > 0) {
needsWarning = true
}
}
}
})
// If cut by milestone is checked
if ($('input[name=\'cutByMS\']:checked').length === 0) {
// For cutting by characters
if (defCutTypeValue === 'letters') {
// Check each document without individual options
eltsWithoutIndividualOpts.forEach(function (elt) {
// If the number of characters-segment size/segment size-overlap size > 100
if ((numChar[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
// Do the same with words and lines
} else if (defCutTypeValue === 'words') {
eltsWithoutIndividualOpts.forEach(function (elt) {
if ((numWord[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
} else if (defCutTypeValue === 'lines') {
eltsWithoutIndividualOpts.forEach(function (elt) {
if ((numLine[elt] - cutVal) / (cutVal - overVal) > maxSegs) {
needsWarning = true
}
})
// If the segment size > 100 and there are documents without individual options
} else if (cutVal > maxSegs && eltsWithoutIndividualOpts.length > 0) {
needsWarning = true
}
}
if (needsWarning === true) {
$('#needsWarning').val('true')
const sizeWarning = `Current cut settings will result in over
100 new segments. Please be patient if you continue.`
const footerButtons = `<button type="button" class="btn btn-default" id="warningContinue">Continue Anyway</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>`
$('#warning-modal-footer').html(footerButtons)
$('#warning-modal-message').html(sizeWarning)
// Hide the processing icon and show the modal
$('#status-prepare').css({'visibility': 'hidden'})
$('#warning-modal').modal()
} else {
$('#needsWarning').val('false')
}
}
let xhr
/**
* Performs the ajax request.
* @param {string} action - the action type being requested.
* @returns {void}
*/
function doAjax (action) {
/* It's not really efficient to create a FormData and a json object,
but the former is easier to pass to lexos.py functions, and the
latter is easier for the ajax response to use. */
const numActiveFiles = $('#numActiveFiles').val()
const formData = new FormData($('form')[0])
formData.append('action', action)
const jsonForm = jsonifyForm()
$.extend(jsonForm, {'action': action})
// Initiate a timer to allow user to cancel if processing takes too long
const loadingTimeout = window.setTimeout(function () {
$('#needsWarning').val('true')
const timeWarning = `Lexos seems to be taking a long time. \
This may be because you are cutting a large number of documents.
If not, we suggest that you cancel, reload the page, and try again.`
const footerButtons = `<button type="button" class="btn btn-default" data-dismiss="modal">Continue Anyway</button>
<button type="button" class="btn btn-default" id="timerCancel" >Cancel</button>`
$('#warning-modal-footer').html(footerButtons)
$('#warning-modal-message').html(timeWarning)
$('#warning-modal').modal()
}, 10000) // 10 seconds
xhr = $.ajax({
url: '/doCutting',
type: 'POST',
processData: false, // important
contentType: false,
data: formData,
error: function (jqXHR, textStatus, errorThrown) {
$('#status-prepare').css({'visibility': 'hidden'})
// Show an error if the user has not cancelled the action
if (errorThrown !== 'abort') {
const notApplyMsg = 'Lexos could not apply the cutting actions.'
$('#error-modal-message').html(notApplyMsg)
$('#error-modal').modal()
}
console.log(`bad: ${textStatus}: ${errorThrown}`)
}
}).done(function (response) {
clearTimeout(loadingTimeout)
$('#warning-modal').modal('hide') // Hide the warning if it is displayed
response = JSON.parse(response)
$('#preview-body').empty() // Correct
$.each(response['data'], function () {
const fileID = $(this)[0]
const fileName = $(this)[1]
const fileLabel = fileName
const fileContents = $(this)[3]
const indivCutButtons = `<a id="indivCutButtons_${fileID}" onclick="toggleIndivCutOptions(${fileID});" class="bttn indivCutButtons" role="button">Individual Options</a></legend>`
// CSS truncates the document label
const fieldSet = $(`<fieldSet class="individualpreviewwrapper"><legend class="individualpreviewlegend has-tooltip" style="color:#999; width:90%;margin: auto; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;">${fileLabel} ${indivCutButtons}</fieldSet>`)
const indCutOptsWrap = `<div id="indCutOptsWrap_${fileID}" class="cuttingoptionswrapper ind hidden">\
<fieldSet class="cuttingoptionsfieldSet">\
<legend class="individualcuttingoptionstitle">Individual Cutting Options</legend>\
<div class="cuttingdiv individcut">\
<div class="row">\
<div class="col-md-5">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndLetters_${fileID}" value="letters"/>\
Characters/Segment</label>\
</div>\
<div class="col-md-7">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndWords_${fileID}" value="words"/>\
Tokens/Segment</label>\
</div>\
</div>\
<div class="row cutting-radio">\
<div class="col-md-5">\
<label class="radio sizeradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndLines_${fileID}" value="lines"/>\
Lines/Segment</label>\
</div>\
<div class="col-md-7">\
<label class="radio numberradio">\
<input type="radio" name="cutType_${fileID}" id="cutTypeIndNumber_${fileID}" value="number"/>\
Segments/Document</label>\
</div>\
</div>\
</div>\
<div class="row">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:3%;">\
<label>\
<span id="numOf${fileID}" class="cut-label-text">Number of Segments:</span>\
<input type="number" min="1" step="1" name="cutValue_${fileID}" class="cut-text-input" id="individualCutValue" value=""/>\
</label>\
</div>\
</div>\
<div class="row overlap-div">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:3%;">\
<label>Overlap: \
<input type="number" min="0" name="cutOverlap_${fileID}" class="cut-text-input overlap-input" id="individualOverlap" value="0"/>\
</label>\
</div>\
</div>\
<div id="lastprop-div_${fileID}" class="row lastprop-div">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:1%;">\
<label>Last Proportion Threshold: \
<input type="number" min="0" id="cutLastProp_${fileID}" name="cutLastProp_${fileID}" class="cut-text-input lastprop-input" value="50" style="width:54px;margin-right:3px;"/>\
%</label>\
</div>\
</div>\
<div class="row">\
<div class="col-md-6 pull-right" style="padding-left:2px;padding-right:1%;">\
<label>Cutset Label: \
<input type="text" name="cutsetnaming_${fileID}" class="cutsetnaming" value="${fileName}" style="width:155px;display:inline; margin: auto; white-space: nowrap; overflow: hidden; text-overflow: ellipsis;"/>\
</label>\
</div>\
</div>\
<div class="row cuttingdiv" id="cutByMSdiv">\
<div class="col-sm-4">\
<label>\
<input type="checkbox" class="indivMS" name="cutByMS_${fileID}" id="cutByMS_${fileID}"/>\
Cut by Milestone</label>\
</div>\
<div class="col-sm-8 pull-right" id="MSoptspan" style="display:none;">\
<span>Cut document on this term \
<input type="text" class="indivMSinput" name="MScutWord_${fileID}" id="MScutWord${fileID}" value="" style="margin-left:3px;width:130px;"/>\
</span>\
</div>\
</div>\
</fieldSet>\
</div>`
fieldSet.append(indCutOptsWrap)
if ($.type(fileContents) === 'string') {
fieldSet.append(`<div class="filecontents">${fileContents}</div>`) // Keep this with no whitespace!
} else {
$.each(fileContents, function (i, segment) {
const segmentLabel = segment[0]
const segmentString = segment[1]
fieldSet.append(`<div class="filechunk"><span class="filechunklabel">${segmentLabel}</span><div>${segmentString}</div></div>`)
})
}
$('#preview-body').append(fieldSet)
// Hide the individual cutting wrapper if the form doesn't contain values for it
if (!(`cutType_${fileID}` in formData) && formData[`cutType_${fileID}`] !== '') {
$(`#indCutOptsWrap_${fileID}`).addClass('hidden')
}
// Check the cut type boxes
if (formData['cutTypeInd'] === 'letters') {
$(`#cutTypeIndLetters_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'words') {
$(`#cutTypeIndWords_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'lines') {
$(`#cutTypeIndLines_${fileID}`).prop('checked', true)
}
if (formData['cutTypeInd'] === 'number') {
$(`#cutTypeIndNumber_${fileID}`).prop('checked', true)
$(`#numOf_${fileID}`).html('Number of Segments')
$('#lastprop-div').addClass('transparent')
$(`#cutLastProp_${fileID}`).prop('disabled', true)
}
if (formData['Overlap']) { $(`#cutOverlap_${fileID}`).val(formData['Overlap']) } else { $(`#cutOverlap_${fileID}`).val(0) }
if (formData[`cutLastProp_${fileID}`]) {
$(`#lastprop-div_${fileID}`).val(formData[`#cutLastProp_${fileID}`])
}
if (formData['cutType'] === 'milestone') {
$(`#cutTypeIndNumber_${fileID}`).prop('checked', true)
}
if (formData[`MScutWord_${fileID}`] === 'milestone') {
$(`#MScutWord${fileID}`).val(formData['cuttingoptions']['cutValue'])
}
})
$('.fa-folder-open-o').attr('data-original-title', `You have ${numActiveFiles} active document(s).`)
$('#status-prepare').css({'visibility': 'hidden'})
})
}
/**
   * Checks the form data for errors and warnings, then performs the requested action if none are found.
* @param {string} action - the action type being requested.
* @returns {void}
*/
function process (action) {
$('#status-prepare').css({'visibility': 'visible', 'z-index': '400000'})
$('#formAction').val(action)
$.when(checkForErrors()).done(function () {
if ($('#hasErrors').val() === 'false') {
checkForWarnings()
$.when(checkForWarnings()).done(function () {
if ($('#needsWarning').val() === 'false') {
doAjax(action)
}
})
}
})
}
// Handle the Continue button in the warning modal
$(document).on('click', '#warningContinue', function () {
$('#needsWarning').val('false')
const action = $('#formAction').val()
$('#warning-modal').modal('hide')
doAjax(action)
$('#status-prepare').css({'visibility': 'visible', 'z-index': '400000'})
})
// Handle the Timer Cancel button in the warning modal
$(document).on('click', '#timerCancel', function () {
$('#needsWarning').val('false')
$('#hasErrors').val('false')
xhr.abort()
$('#warning-modal-footer').append('<button>Moo</button>')
$('#warning-modal').modal('hide')
$('#status-prepare').css('visibility', 'hidden')
})
/**
* Convert the form data into a JSON object.
   * @returns {Object} - the form data as a JSON object.
*/
function jsonifyForm () {
const form = {}
$.each($('form').serializeArray(), function (i, field) {
form[field.name] = field.value || ''
})
return form
}
/**
* Performs the download request (through flask) when download button clicked.
*/
$(document).on('click', '#downloadCutting', function () {
// Unfortunately, you can't trigger a download with an ajax request; calling a
// Flask route seems to be the easiest method.
if ($('#num_active_files').val() > '0') {
window.location = '/downloadCutting'
}
})
/**
* Toggles milestone options.
* @returns {void}
*/
function showMilestoneOptions () {
if ($('#cutByMS').is(':checked')) {
$('#MSoptspan').removeClass('hidden')
$('#cuttingdiv').hide()
} else {
$('#MSoptspan').addClass('hidden')
$('#cuttingdiv').show()
}
}
/**
* Document ready function.
*/
$(function () {
$('#actions').addClass('actions-cut')
// Toggle cutting options when radio buttons with different classes are clicked
const timeToToggle = 150
$('.sizeradio').click(function () {
const cuttingValueLabel = $(this).parents('.cuttingoptionswrapper').find('.cut-label-text')
cuttingValueLabel.text('Segment Size:')
$(this).parents('.cuttingoptionswrapper').find('.lastprop-div')
.animate({opacity: 1}, timeToToggle)
.find('.lastprop-input').prop('disabled', false)
$(this).parents('.cuttingoptionswrapper').find('.overlap-div')
.animate({opacity: 1}, timeToToggle)
.find('.overlap-input').prop('disabled', false)
})
$('.numberradio').click(function () {
const cuttingValueLabel = $(this).parents('.cuttingoptionswrapper').find('.cut-label-text')
cuttingValueLabel.text('Number of Segments:')
$(this).parents('.cuttingoptionswrapper').find('.lastprop-div')
.animate({opacity: 0.2}, timeToToggle)
.find('.lastprop-input').prop('disabled', true)
$(this).parents('.cuttingoptionswrapper').find('.overlap-div')
.animate({opacity: 0.2}, timeToToggle)
.find('.overlap-input').prop('disabled', true)
})
// Toggle individual cut option on load.
$('.indivCutButtons').click(function () {
const toggleDiv = $(this).closest('.individualpreviewwrapper').find('.cuttingoptionswrapper')
toggleDiv.toggleClass('hidden')
})
$('#cutByMS').click(showMilestoneOptions)
$(document).on('click', '.indivMS', function () {
showMilestoneOptions()
if ($(this).is(':checked')) {
$(this).parents('#cutByMSdiv').filter(':first').children('#MSoptspan').show()
$(this).parents('#cutByMSdiv').filter(':first')
.parents('.cuttingoptionswrapper').find('.individcut').hide()
} else {
$(this).parents('#cutByMSdiv').filter(':first').children('#MSoptspan').hide()
$(this).parents('#cutByMSdiv').filter(':first')
.parents('.cuttingoptionswrapper').find('.individcut').show()
}
})
})
| Accessed numWord through metadata.
| lexos/static/js/scripts_cut.js | Accessed numWord through metadata. | <ide><path>exos/static/js/scripts_cut.js
<ide> * @returns {void}
<ide> */
<ide> function checkForWarnings () {
<add> // load numWord from metadata
<ide> const numWordLoad = $('#num-word').data()
<del> //console.log(numWord.numword[0])
<del> const numWord = numWordLoad.numword[0]
<del> console.log(numWord)
<add> // Access array within object
<add> const numWord = numWordLoad.numword
<ide> let needsWarning = false
<ide> const maxSegs = 100
<ide> const defCutTypeValue = $('input[name=\'cutType\']:checked').val() // Cut Type
<ide> if (thisCutType === 'letters' && (numChar[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
<ide> needsWarning = true
<ide> // Same for segments and lines
<del> }
<del>
<del> else if (thisCutType === 'words' && (numWord - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
<add> } else if (thisCutType === 'words' && (numWord[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) {
<ide> console.log(numWord)
<ide> needsWarning = true
<ide> } else if (thisCutType === 'lines' && (numLine[listindex] - thisOverVal) / (thisCutVal - thisOverVal) > maxSegs) { |
|
Java | apache-2.0 | 268f2f00fea6270bd3094a3ebcac5cc36756026a | 0 | freme-project/basic-services,freme-project/basic-services,freme-project/basic-services | package eu.freme.bservices.authenticatedtesthelper;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import com.mashape.unirest.request.HttpRequest;
import eu.freme.bservices.testhelper.AbstractTestHelper;
import eu.freme.common.rest.BaseRestController;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import static org.junit.Assert.assertTrue;
/**
* Created by Arne Binder ([email protected]) on 06.01.2016.
*/
@Component
public class AuthenticatedTestHelper extends AbstractTestHelper {
Logger logger = Logger.getLogger(AuthenticatedTestHelper.class);
private static String tokenWithPermission;
private static String tokenWithOutPermission;
private static String tokenAdmin;
private static boolean authenticated = false;
private static boolean authenticatedRemoved = false;
protected final String usernameWithPermission = "userwithpermission";
protected final String passwordWithPermission = "testpassword";
protected final String usernameWithoutPermission = "userwithoutpermission";
protected final String passwordWithoutPermission = "testpassword";
public void createUser(String username, String password) throws UnirestException {
logger.info("create user: "+username);
HttpResponse<String> response = Unirest.post(getAPIBaseUrl() + "/user")
.queryString("username", username)
.queryString("password", password).asString();
assertTrue(response.getStatus() == HttpStatus.OK.value());
}
public void deleteUser(String username, String token) throws UnirestException{
logger.info("delete user: "+username);
HttpResponse<String> response = addAuthentication(Unirest.delete(getAPIBaseUrl() + "/user/"+username), token).asString();
assertTrue(response.getStatus() == HttpStatus.NO_CONTENT.value());
}
public String authenticateUser(String username, String password) throws UnirestException{
HttpResponse<String> response;
logger.info("login with new user / create token");
response = Unirest
.post(getAPIBaseUrl() + BaseRestController.authenticationEndpoint)
.header("X-Auth-Username", username)
.header("X-Auth-Password", password).asString();
assertTrue(response.getStatus() == HttpStatus.OK.value());
String token = new JSONObject(response.getBody()).getString("token");
return token;
}
/**
     * This method creates and authenticates two users, userwithpermission and userwithoutpermission.
* Furthermore the admin token is created.
* @throws UnirestException
*/
public void authenticateUsers() throws UnirestException {
if(!authenticated) {
//Creates two users, one intended to have permission, the other not
createUser(usernameWithPermission, passwordWithPermission);
tokenWithPermission = authenticateUser(usernameWithPermission, passwordWithPermission);
createUser(usernameWithoutPermission, passwordWithoutPermission);
tokenWithOutPermission = authenticateUser(usernameWithoutPermission, passwordWithoutPermission);
//ConfigurableApplicationContext context = IntegrationTestSetup.getApplicationContext();
tokenAdmin = authenticateUser(getAdminUsername(), getAdminPassword());
authenticated = true;
}
}
public void removeAuthenticatedUsers() throws UnirestException {
if(!authenticatedRemoved) {
deleteUser(usernameWithPermission, tokenWithPermission);
deleteUser(usernameWithoutPermission, tokenWithOutPermission);
authenticatedRemoved = true;
}
}
/**
* Use this method to add an authentication header to the request.
* If the given token is null, the request will not be modified.
* @param request The request to add the authentication
* @param token The authentication Token
* @param <T>
* @return The modified request
*/
@SuppressWarnings("unchecked")
private <T extends HttpRequest> T addAuthentication(T request, String token){
if(token==null)
return request;
return (T)request.header("X-Auth-Token", token);
}
public <T extends HttpRequest> T addAuthentication(T request){
return addAuthentication(request, tokenWithPermission);
}
public <T extends HttpRequest> T addAuthenticationWithoutPermission(T request){
return addAuthentication(request, tokenWithOutPermission);
}
public <T extends HttpRequest> T addAuthenticationWithAdmin(T request){
return addAuthentication(request, tokenAdmin);
}
}
| authenticated-test-helper/src/main/java/eu/freme/bservices/authenticatedtesthelper/AuthenticatedTestHelper.java | package eu.freme.bservices.authenticatedtesthelper;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import com.mashape.unirest.request.HttpRequest;
import eu.freme.common.rest.BaseRestController;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import eu.freme.bservices.testhelper.AbstractTestHelper;
import static org.junit.Assert.assertTrue;
/**
* Created by Arne Binder ([email protected]) on 06.01.2016.
*/
@Component
public class AuthenticatedTestHelper extends AbstractTestHelper {
Logger logger = Logger.getLogger(AuthenticatedTestHelper.class);
//private String baseUrl;
//private String adminUsername;
//private String adminPassword;
private static String tokenWithPermission;
private static String tokenWithOutPermission;
private static String tokenAdmin;
private static boolean authenticated = false;
protected final String usernameWithPermission = "userwithpermission";
protected final String passwordWithPermission = "testpassword";
protected final String usernameWithoutPermission = "userwithoutpermission";
protected final String passwordWithoutPermission = "testpassword";
public void createUser(String username, String password) throws UnirestException {
logger.info("create user: "+username);
HttpResponse<String> response = Unirest.post(getAPIBaseUrl() + "/user")
.queryString("username", username)
.queryString("password", password).asString();
assertTrue(response.getStatus() == HttpStatus.OK.value());
}
public void deleteUser(String username, String token) throws UnirestException{
logger.info("delete user: "+username);
HttpResponse<String> response = addAuthentication(Unirest.delete(getAPIBaseUrl() + "/user/"+username), token).asString();
assertTrue(response.getStatus() == HttpStatus.NO_CONTENT.value());
}
public String authenticateUser(String username, String password) throws UnirestException{
HttpResponse<String> response;
logger.info("login with new user / create token");
response = Unirest
.post(getAPIBaseUrl() + BaseRestController.authenticationEndpoint)
.header("X-Auth-Username", username)
.header("X-Auth-Password", password).asString();
assertTrue(response.getStatus() == HttpStatus.OK.value());
String token = new JSONObject(response.getBody()).getString("token");
return token;
}
/**
     * This method creates and authenticates two users, userwithpermission and userwithoutpermission.
* Furthermore the admin token is created.
* @throws UnirestException
*/
public void authenticateUsers() throws UnirestException {
if(!authenticated) {
//Creates two users, one intended to have permission, the other not
createUser(usernameWithPermission, passwordWithPermission);
tokenWithPermission = authenticateUser(usernameWithPermission, passwordWithPermission);
createUser(usernameWithoutPermission, passwordWithoutPermission);
tokenWithOutPermission = authenticateUser(usernameWithoutPermission, passwordWithoutPermission);
//ConfigurableApplicationContext context = IntegrationTestSetup.getApplicationContext();
tokenAdmin = authenticateUser(getAdminUsername(), getAdminPassword());
authenticated = true;
}
}
public void removeAuthenticatedUsers() throws UnirestException {
deleteUser(usernameWithPermission, tokenWithPermission);
deleteUser(usernameWithoutPermission, tokenWithOutPermission);
}
/**
* Use this method to add an authentication header to the request.
* If the given token is null, the request will not be modified.
* @param request The request to add the authentication
* @param token The authentication Token
* @param <T>
* @return The modified request
*/
@SuppressWarnings("unchecked")
private <T extends HttpRequest> T addAuthentication(T request, String token){
if(token==null)
return request;
return (T)request.header("X-Auth-Token", token);
}
public <T extends HttpRequest> T addAuthentication(T request){
return addAuthentication(request, tokenWithPermission);
}
public <T extends HttpRequest> T addAuthenticationWithoutPermission(T request){
return addAuthentication(request, tokenWithOutPermission);
}
public <T extends HttpRequest> T addAuthenticationWithAdmin(T request){
return addAuthentication(request, tokenAdmin);
}
}
| added check: remove authenticated users only, if not already done
| authenticated-test-helper/src/main/java/eu/freme/bservices/authenticatedtesthelper/AuthenticatedTestHelper.java | added check: remove authenticated users only, if not already done | <ide><path>uthenticated-test-helper/src/main/java/eu/freme/bservices/authenticatedtesthelper/AuthenticatedTestHelper.java
<ide> import com.mashape.unirest.http.Unirest;
<ide> import com.mashape.unirest.http.exceptions.UnirestException;
<ide> import com.mashape.unirest.request.HttpRequest;
<add>import eu.freme.bservices.testhelper.AbstractTestHelper;
<ide> import eu.freme.common.rest.BaseRestController;
<ide> import org.apache.log4j.Logger;
<ide> import org.json.JSONObject;
<del>import org.springframework.beans.BeansException;
<del>import org.springframework.context.ApplicationContext;
<ide> import org.springframework.http.HttpStatus;
<ide> import org.springframework.stereotype.Component;
<del>import eu.freme.bservices.testhelper.AbstractTestHelper;
<ide>
<ide> import static org.junit.Assert.assertTrue;
<ide>
<ide>
<ide> Logger logger = Logger.getLogger(AuthenticatedTestHelper.class);
<ide>
<del> //private String baseUrl;
<del> //private String adminUsername;
<del> //private String adminPassword;
<del>
<ide> private static String tokenWithPermission;
<ide> private static String tokenWithOutPermission;
<ide> private static String tokenAdmin;
<ide>
<ide> private static boolean authenticated = false;
<add> private static boolean authenticatedRemoved = false;
<ide>
<ide> protected final String usernameWithPermission = "userwithpermission";
<ide> protected final String passwordWithPermission = "testpassword";
<ide> }
<ide>
<ide> public void removeAuthenticatedUsers() throws UnirestException {
<del> deleteUser(usernameWithPermission, tokenWithPermission);
<del> deleteUser(usernameWithoutPermission, tokenWithOutPermission);
<add> if(!authenticatedRemoved) {
<add> deleteUser(usernameWithPermission, tokenWithPermission);
<add> deleteUser(usernameWithoutPermission, tokenWithOutPermission);
<add> authenticatedRemoved = true;
<add> }
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | 17de041f452c4ebcfb193f3cfeeee5d48ec82458 | 0 | wimsymons/sling,JEBailey/sling,codders/k2-sling-fork,labertasch/sling,tteofili/sling,vladbailescu/sling,plutext/sling,ffromm/sling,tmaret/sling,cleliameneghin/sling,plutext/sling,ist-dresden/sling,mmanski/sling,Nimco/sling,tteofili/sling,tyge68/sling,ieb/sling,mikibrv/sling,trekawek/sling,mikibrv/sling,anchela/sling,nleite/sling,mmanski/sling,tyge68/sling,dulvac/sling,headwirecom/sling,mmanski/sling,tmaret/sling,wimsymons/sling,trekawek/sling,roele/sling,ist-dresden/sling,ieb/sling,JEBailey/sling,plutext/sling,awadheshv/sling,SylvesterAbreu/sling,tteofili/sling,tmaret/sling,ist-dresden/sling,tyge68/sling,nleite/sling,vladbailescu/sling,ieb/sling,roele/sling,awadheshv/sling,Sivaramvt/sling,mcdan/sling,gutsy/sling,headwirecom/sling,wimsymons/sling,klcodanr/sling,headwirecom/sling,plutext/sling,awadheshv/sling,klcodanr/sling,ffromm/sling,labertasch/sling,ieb/sling,tmaret/sling,ffromm/sling,roele/sling,awadheshv/sling,awadheshv/sling,trekawek/sling,klcodanr/sling,sdmcraft/sling,Nimco/sling,labertasch/sling,Sivaramvt/sling,SylvesterAbreu/sling,ieb/sling,plutext/sling,ist-dresden/sling,dulvac/sling,Nimco/sling,cleliameneghin/sling,cleliameneghin/sling,mmanski/sling,tmaret/sling,klcodanr/sling,mcdan/sling,ffromm/sling,Nimco/sling,mcdan/sling,tteofili/sling,SylvesterAbreu/sling,tteofili/sling,sdmcraft/sling,Sivaramvt/sling,ffromm/sling,JEBailey/sling,anchela/sling,klcodanr/sling,tyge68/sling,headwirecom/sling,sdmcraft/sling,gutsy/sling,cleliameneghin/sling,Sivaramvt/sling,vladbailescu/sling,nleite/sling,gutsy/sling,Sivaramvt/sling,SylvesterAbreu/sling,wimsymons/sling,labertasch/sling,ffromm/sling,labertasch/sling,vladbailescu/sling,roele/sling,mcdan/sling,JEBailey/sling,tyge68/sling,Sivaramvt/sling,nleite/sling,ieb/sling,dulvac/sling,wimsymons/sling,trekawek/sling,wimsymons/sling,nleite/sling,nleite/sling,SylvesterAbreu/sling,mmanski/sling,mikibrv/sling,sdmcraft/sling,sdmcraft/sling,ist-dresden/sling,klcodanr/sling,tteofili/sling,mmanski/sling,sdmcraft/sling,vladbailescu/sling,mikibrv/sling,dulvac/sling,trekawek/sling,codders/k2-sling-fork,roele/sling,gutsy/sling,Nimco/sling,anchela/sling,anchela/sling,gutsy/sling,JEBailey/sling,cleliameneghin/sling,dulvac/sling,mikibrv/sling,gutsy/sling,SylvesterAbreu/sling,mikibrv/sling,codders/k2-sling-fork,mcdan/sling,headwirecom/sling,trekawek/sling,anchela/sling,plutext/sling,dulvac/sling,Nimco/sling,tyge68/sling,awadheshv/sling,mcdan/sling | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.servlets.post.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.ResourceNotFoundException;
import org.apache.sling.api.resource.ResourceUtil;
import org.apache.sling.api.servlets.HtmlResponse;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.commons.osgi.OsgiUtil;
import org.apache.sling.servlets.post.SlingPostConstants;
import org.apache.sling.servlets.post.SlingPostOperation;
import org.apache.sling.servlets.post.SlingPostProcessor;
import org.apache.sling.servlets.post.impl.helper.DateParser;
import org.apache.sling.servlets.post.impl.helper.NodeNameGenerator;
import org.apache.sling.servlets.post.impl.operations.CopyOperation;
import org.apache.sling.servlets.post.impl.operations.DeleteOperation;
import org.apache.sling.servlets.post.impl.operations.ModifyOperation;
import org.apache.sling.servlets.post.impl.operations.MoveOperation;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* POST servlet that implements the sling client library "protocol"
*
* @scr.component immediate="true" label="%servlet.post.name"
* description="%servlet.post.description"
* @scr.service interface="javax.servlet.Servlet"
* @scr.property name="service.description" value="Sling Post Servlet"
* @scr.property name="service.vendor" value="The Apache Software Foundation"
*
* Use this as the default servlet for POST requests for Sling
* @scr.property name="sling.servlet.resourceTypes"
* value="sling/servlet/default" private="true"
* @scr.property name="sling.servlet.methods" value="POST" private="true"
* @scr.reference name="postProcessor" interface="org.apache.sling.servlets.post.SlingPostProcessor" cardinality="0..n" policy="dynamic"
*/
public class SlingPostServlet extends SlingAllMethodsServlet {
private static final long serialVersionUID = 1837674988291697074L;
/**
* default log
*/
private final Logger log = LoggerFactory.getLogger(getClass());
/**
* @scr.property values.0="EEE MMM dd yyyy HH:mm:ss 'GMT'Z"
* values.1="yyyy-MM-dd'T'HH:mm:ss.SSSZ"
* values.2="yyyy-MM-dd'T'HH:mm:ss" values.3="yyyy-MM-dd"
* values.4="dd.MM.yyyy HH:mm:ss" values.5="dd.MM.yyyy"
*/
private static final String PROP_DATE_FORMAT = "servlet.post.dateFormats";
/**
* @scr.property values.0="title" values.1="jcr:title" values.2="name"
* values.3="description" values.4="jcr:description"
* values.5="abstract"
*/
private static final String PROP_NODE_NAME_HINT_PROPERTIES = "servlet.post.nodeNameHints";
/**
* @scr.property value="20" type="Integer"
*/
private static final String PROP_NODE_NAME_MAX_LENGTH = "servlet.post.nodeNameMaxLength";
/**
* utility class for generating node names
*/
private NodeNameGenerator nodeNameGenerator;
/**
* utility class for parsing date strings
*/
private DateParser dateParser;
private SlingPostOperation modifyOperation;
private final Map<String, SlingPostOperation> postOperations = new HashMap<String, SlingPostOperation>();
private final List<ServiceReference> delayedPostProcessors = new ArrayList<ServiceReference>();
private final List<ServiceReference> postProcessors = new ArrayList<ServiceReference>();
private SlingPostProcessor[] cachedPostProcessors = new SlingPostProcessor[0];
private ComponentContext componentContext;
@Override
public void init() {
// default operation: create/modify
modifyOperation = new ModifyOperation(nodeNameGenerator, dateParser,
getServletContext());
// other predefined operations
postOperations.put(SlingPostConstants.OPERATION_COPY,
new CopyOperation());
postOperations.put(SlingPostConstants.OPERATION_MOVE,
new MoveOperation());
postOperations.put(SlingPostConstants.OPERATION_DELETE,
new DeleteOperation());
}
@Override
public void destroy() {
modifyOperation = null;
postOperations.clear();
}
@Override
protected void doPost(SlingHttpServletRequest request,
SlingHttpServletResponse response) throws IOException {
// prepare the response
HtmlResponse htmlResponse = new HtmlResponse();
htmlResponse.setReferer(request.getHeader("referer"));
SlingPostOperation operation = getSlingPostOperation(request);
if (operation == null) {
htmlResponse.setStatus(
HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
"Invalid operation specified for POST request");
} else {
final SlingPostProcessor[] processors;
synchronized ( this.delayedPostProcessors ) {
processors = this.cachedPostProcessors;
}
try {
operation.run(request, htmlResponse, processors);
} catch (ResourceNotFoundException rnfe) {
htmlResponse.setStatus(HttpServletResponse.SC_NOT_FOUND,
rnfe.getMessage());
} catch (Throwable throwable) {
log.debug("Exception while handling POST "
+ request.getResource().getPath() + " with "
+ operation.getClass().getName(), throwable);
htmlResponse.setError(throwable);
}
}
// check for redirect URL if processing succeeded
if (htmlResponse.isSuccessful()) {
String redirect = getRedirectUrl(request, htmlResponse);
if (redirect != null) {
response.sendRedirect(redirect);
return;
}
}
        // create an HTML response and send if unsuccessful or no redirect
htmlResponse.send(response, isSetStatus(request));
}
private SlingPostOperation getSlingPostOperation(
SlingHttpServletRequest request) {
String operation = request.getParameter(SlingPostConstants.RP_OPERATION);
if (operation == null || operation.length() == 0) {
// standard create/modify operation;
return modifyOperation;
}
// named operation, retrieve from map
return postOperations.get(operation);
}
/**
* compute redirect URL (SLING-126)
*
     * @param ctx the HTML response carrying the result of the operation
* @return the redirect location or <code>null</code>
*/
protected String getRedirectUrl(HttpServletRequest request, HtmlResponse ctx) {
// redirect param has priority (but see below, magic star)
String result = request.getParameter(SlingPostConstants.RP_REDIRECT_TO);
if (result != null && ctx.getPath() != null) {
// redirect to created/modified Resource
int star = result.indexOf('*');
if (star >= 0) {
StringBuffer buf = new StringBuffer();
// anything before the star
if (star > 0) {
buf.append(result.substring(0, star));
}
// append the name of the manipulated node
buf.append(ResourceUtil.getName(ctx.getPath()));
// anything after the star
if (star < result.length() - 1) {
buf.append(result.substring(star + 1));
}
// use the created path as the redirect result
result = buf.toString();
} else if (result.endsWith(SlingPostConstants.DEFAULT_CREATE_SUFFIX)) {
// if the redirect has a trailing slash, append modified node
// name
result = result.concat(ResourceUtil.getName(ctx.getPath()));
}
if (log.isDebugEnabled()) {
log.debug("Will redirect to " + result);
}
}
return result;
}
protected boolean isSetStatus(SlingHttpServletRequest request) {
String statusParam = request.getParameter(SlingPostConstants.RP_STATUS);
if (statusParam == null) {
log.debug(
"getStatusMode: Parameter {} not set, assuming standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
if (SlingPostConstants.STATUS_VALUE_BROWSER.equals(statusParam)) {
log.debug(
"getStatusMode: Parameter {} asks for user-friendly status code",
SlingPostConstants.RP_STATUS);
return false;
}
if (SlingPostConstants.STATUS_VALUE_STANDARD.equals(statusParam)) {
log.debug(
"getStatusMode: Parameter {} asks for standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
log.debug(
"getStatusMode: Parameter {} set to unknown value {}, assuming standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
// ---------- SCR Integration ----------------------------------------------
protected void activate(ComponentContext context) {
synchronized ( this.delayedPostProcessors ) {
this.componentContext = context;
for(final ServiceReference ref : this.delayedPostProcessors) {
this.registerPostProcessor(ref);
}
this.delayedPostProcessors.clear();
}
Dictionary<?, ?> props = context.getProperties();
String[] nameHints = OsgiUtil.toStringArray(props.get(PROP_NODE_NAME_HINT_PROPERTIES));
int nameMax = (int) OsgiUtil.toLong(
props.get(PROP_NODE_NAME_MAX_LENGTH), -1);
nodeNameGenerator = new NodeNameGenerator(nameHints, nameMax);
dateParser = new DateParser();
String[] dateFormats = OsgiUtil.toStringArray(props.get(PROP_DATE_FORMAT));
for (String dateFormat : dateFormats) {
dateParser.register(dateFormat);
}
}
protected void deactivate(ComponentContext context) {
nodeNameGenerator = null;
dateParser = null;
this.componentContext = null;
}
protected void bindPostProcessor(ServiceReference ref) {
synchronized ( this.delayedPostProcessors ) {
if ( this.componentContext == null ) {
this.delayedPostProcessors.add(ref);
} else {
this.registerPostProcessor(ref);
}
}
}
protected void unbindPostProcessor(ServiceReference ref) {
synchronized ( this.delayedPostProcessors ) {
this.delayedPostProcessors.remove(ref);
this.postProcessors.remove(ref);
}
}
protected void registerPostProcessor(ServiceReference ref) {
final int ranking = OsgiUtil.toInteger(ref.getProperty(Constants.SERVICE_RANKING), 0);
int index = 0;
while ( index < this.postProcessors.size() &&
ranking < OsgiUtil.toInteger(this.postProcessors.get(index).getProperty(Constants.SERVICE_RANKING), 0)) {
index++;
}
if ( index == this.postProcessors.size() ) {
this.postProcessors.add(ref);
} else {
this.postProcessors.add(index, ref);
}
this.cachedPostProcessors = new SlingPostProcessor[this.postProcessors.size()];
index = 0;
for(final ServiceReference current : this.postProcessors) {
final SlingPostProcessor processor = (SlingPostProcessor) this.componentContext.locateService("postProcessor", current);
this.cachedPostProcessors[index] = processor;
index++;
}
}
}
| servlets/post/src/main/java/org/apache/sling/servlets/post/impl/SlingPostServlet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.servlets.post.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.ResourceNotFoundException;
import org.apache.sling.api.resource.ResourceUtil;
import org.apache.sling.api.servlets.HtmlResponse;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.commons.osgi.OsgiUtil;
import org.apache.sling.servlets.post.SlingPostConstants;
import org.apache.sling.servlets.post.SlingPostOperation;
import org.apache.sling.servlets.post.SlingPostProcessor;
import org.apache.sling.servlets.post.impl.helper.DateParser;
import org.apache.sling.servlets.post.impl.helper.NodeNameGenerator;
import org.apache.sling.servlets.post.impl.operations.CopyOperation;
import org.apache.sling.servlets.post.impl.operations.DeleteOperation;
import org.apache.sling.servlets.post.impl.operations.ModifyOperation;
import org.apache.sling.servlets.post.impl.operations.MoveOperation;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* POST servlet that implements the sling client library "protocol"
*
* @scr.component immediate="true" label="%servlet.post.name"
* description="%servlet.post.description"
* @scr.service interface="javax.servlet.Servlet"
* @scr.property name="service.description" value="Sling Post Servlet"
* @scr.property name="service.vendor" value="The Apache Software Foundation"
*
* Use this as the default servlet for POST requests for Sling
* @scr.property name="sling.servlet.resourceTypes"
* value="sling/servlet/default" private="true"
* @scr.property name="sling.servlet.methods" value="POST" private="true"
* @scr.reference name="postProcessor" interface="org.apache.sling.servlets.post.SlingPostProcessor" cardinality="0..n" policy="dynamic"
*/
public class SlingPostServlet extends SlingAllMethodsServlet {
private static final long serialVersionUID = 1837674988291697074L;
/**
* default log
*/
private final Logger log = LoggerFactory.getLogger(getClass());
/**
* @scr.property values.0="EEE MMM dd yyyy HH:mm:ss 'GMT'Z"
* values.1="yyyy-MM-dd'T'HH:mm:ss.SSSZ"
* values.2="yyyy-MM-dd'T'HH:mm:ss" values.3="yyyy-MM-dd"
* values.4="dd.MM.yyyy HH:mm:ss" values.5="dd.MM.yyyy"
*/
private static final String PROP_DATE_FORMAT = "servlet.post.dateFormats";
/**
* @scr.property values.0="title" values.1="jcr:title" values.2="name"
* values.3="description" values.4="jcr:description"
* values.5="abstract"
*/
private static final String PROP_NODE_NAME_HINT_PROPERTIES = "servlet.post.nodeNameHints";
/**
* @scr.property value="20" type="Integer"
*/
private static final String PROP_NODE_NAME_MAX_LENGTH = "servlet.post.nodeNameMaxLength";
/**
* utility class for generating node names
*/
private NodeNameGenerator nodeNameGenerator;
/**
* utility class for parsing date strings
*/
private DateParser dateParser;
private SlingPostOperation modifyOperation;
private final Map<String, SlingPostOperation> postOperations = new HashMap<String, SlingPostOperation>();
private final List<ServiceReference> delayedPostProcessors = new ArrayList<ServiceReference>();
private final List<ServiceReference> postProcessors = new ArrayList<ServiceReference>();
private SlingPostProcessor[] cachedPostProcessors = new SlingPostProcessor[0];
private ComponentContext componentContext;
@Override
public void init() {
// default operation: create/modify
modifyOperation = new ModifyOperation(nodeNameGenerator, dateParser,
getServletContext());
// other predefined operations
postOperations.put(SlingPostConstants.OPERATION_COPY,
new CopyOperation());
postOperations.put(SlingPostConstants.OPERATION_MOVE,
new MoveOperation());
postOperations.put(SlingPostConstants.OPERATION_DELETE,
new DeleteOperation());
}
@Override
public void destroy() {
modifyOperation = null;
postOperations.clear();
}
@Override
protected void doPost(SlingHttpServletRequest request,
SlingHttpServletResponse response) throws IOException {
// prepare the response
HtmlResponse htmlResponse = new HtmlResponse();
htmlResponse.setReferer(request.getHeader("referer"));
SlingPostOperation operation = getSlingPostOperation(request);
if (operation == null) {
htmlResponse.setStatus(
HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
"Invalid operation specified for POST request");
} else {
final SlingPostProcessor[] processors;
synchronized ( this.delayedPostProcessors ) {
processors = this.cachedPostProcessors;
}
try {
operation.run(request, htmlResponse, processors);
} catch (ResourceNotFoundException rnfe) {
htmlResponse.setStatus(HttpServletResponse.SC_NOT_FOUND,
rnfe.getMessage());
} catch (Throwable throwable) {
htmlResponse.setError(throwable);
}
}
// check for redirect URL if processing succeeded
if (htmlResponse.isSuccessful()) {
String redirect = getRedirectUrl(request, htmlResponse);
if (redirect != null) {
response.sendRedirect(redirect);
return;
}
}
        // create an HTML response and send if unsuccessful or no redirect
htmlResponse.send(response, isSetStatus(request));
}
private SlingPostOperation getSlingPostOperation(
SlingHttpServletRequest request) {
String operation = request.getParameter(SlingPostConstants.RP_OPERATION);
if (operation == null || operation.length() == 0) {
// standard create/modify operation;
return modifyOperation;
}
// named operation, retrieve from map
return postOperations.get(operation);
}
/**
* compute redirect URL (SLING-126)
*
     * @param ctx the HTML response carrying the result of the operation
* @return the redirect location or <code>null</code>
*/
protected String getRedirectUrl(HttpServletRequest request, HtmlResponse ctx) {
// redirect param has priority (but see below, magic star)
String result = request.getParameter(SlingPostConstants.RP_REDIRECT_TO);
if (result != null && ctx.getPath() != null) {
// redirect to created/modified Resource
int star = result.indexOf('*');
if (star >= 0) {
StringBuffer buf = new StringBuffer();
// anything before the star
if (star > 0) {
buf.append(result.substring(0, star));
}
// append the name of the manipulated node
buf.append(ResourceUtil.getName(ctx.getPath()));
// anything after the star
if (star < result.length() - 1) {
buf.append(result.substring(star + 1));
}
// use the created path as the redirect result
result = buf.toString();
} else if (result.endsWith(SlingPostConstants.DEFAULT_CREATE_SUFFIX)) {
// if the redirect has a trailing slash, append modified node
// name
result = result.concat(ResourceUtil.getName(ctx.getPath()));
}
if (log.isDebugEnabled()) {
log.debug("Will redirect to " + result);
}
}
return result;
}
protected boolean isSetStatus(SlingHttpServletRequest request) {
String statusParam = request.getParameter(SlingPostConstants.RP_STATUS);
if (statusParam == null) {
log.debug(
"getStatusMode: Parameter {} not set, assuming standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
if (SlingPostConstants.STATUS_VALUE_BROWSER.equals(statusParam)) {
log.debug(
"getStatusMode: Parameter {} asks for user-friendly status code",
SlingPostConstants.RP_STATUS);
return false;
}
if (SlingPostConstants.STATUS_VALUE_STANDARD.equals(statusParam)) {
log.debug(
"getStatusMode: Parameter {} asks for standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
log.debug(
"getStatusMode: Parameter {} set to unknown value {}, assuming standard status code",
SlingPostConstants.RP_STATUS);
return true;
}
// ---------- SCR Integration ----------------------------------------------
protected void activate(ComponentContext context) {
synchronized ( this.delayedPostProcessors ) {
this.componentContext = context;
for(final ServiceReference ref : this.delayedPostProcessors) {
this.registerPostProcessor(ref);
}
this.delayedPostProcessors.clear();
}
Dictionary<?, ?> props = context.getProperties();
String[] nameHints = OsgiUtil.toStringArray(props.get(PROP_NODE_NAME_HINT_PROPERTIES));
int nameMax = (int) OsgiUtil.toLong(
props.get(PROP_NODE_NAME_MAX_LENGTH), -1);
nodeNameGenerator = new NodeNameGenerator(nameHints, nameMax);
dateParser = new DateParser();
String[] dateFormats = OsgiUtil.toStringArray(props.get(PROP_DATE_FORMAT));
for (String dateFormat : dateFormats) {
dateParser.register(dateFormat);
}
}
protected void deactivate(ComponentContext context) {
nodeNameGenerator = null;
dateParser = null;
this.componentContext = null;
}
protected void bindPostProcessor(ServiceReference ref) {
synchronized ( this.delayedPostProcessors ) {
if ( this.componentContext == null ) {
this.delayedPostProcessors.add(ref);
} else {
this.registerPostProcessor(ref);
}
}
}
protected void unbindPostProcessor(ServiceReference ref) {
synchronized ( this.delayedPostProcessors ) {
this.delayedPostProcessors.remove(ref);
this.postProcessors.remove(ref);
}
}
protected void registerPostProcessor(ServiceReference ref) {
final int ranking = OsgiUtil.toInteger(ref.getProperty(Constants.SERVICE_RANKING), 0);
int index = 0;
while ( index < this.postProcessors.size() &&
ranking < OsgiUtil.toInteger(this.postProcessors.get(index).getProperty(Constants.SERVICE_RANKING), 0)) {
index++;
}
if ( index == this.postProcessors.size() ) {
this.postProcessors.add(ref);
} else {
this.postProcessors.add(index, ref);
}
this.cachedPostProcessors = new SlingPostProcessor[this.postProcessors.size()];
index = 0;
for(final ServiceReference current : this.postProcessors) {
final SlingPostProcessor processor = (SlingPostProcessor) this.componentContext.locateService("postProcessor", current);
this.cachedPostProcessors[index] = processor;
index++;
}
}
}
| SLING-669 log an exception at debug level to have the
stack trace in addition to the message in log file
git-svn-id: c3eb811ccca381e673aa62a65336ec26649ed58c@699286 13f79535-47bb-0310-9956-ffa450edef68
| servlets/post/src/main/java/org/apache/sling/servlets/post/impl/SlingPostServlet.java | SLING-669 log an exception at debug level to have the stack trace in addition to the message in log file | <ide><path>ervlets/post/src/main/java/org/apache/sling/servlets/post/impl/SlingPostServlet.java
<ide> htmlResponse.setStatus(HttpServletResponse.SC_NOT_FOUND,
<ide> rnfe.getMessage());
<ide> } catch (Throwable throwable) {
<add> log.debug("Exception while handling POST "
<add> + request.getResource().getPath() + " with "
<add> + operation.getClass().getName(), throwable);
<ide> htmlResponse.setError(throwable);
<ide> }
<ide> |
|
Java | bsd-3-clause | 8cc0b7fe786e43930f0030ee87c463c4f3d2a28e | 0 | NCIP/camod,NCIP/camod,NCIP/camod,NCIP/camod | /**
*
* $Id: AuthenticationFilter.java,v 1.7 2008-08-14 17:12:21 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.6 2006/11/09 17:33:19 pandyas
* Commented out debug code
*
* Revision 1.5 2006/04/17 19:10:50 pandyas
* Added $Id: AuthenticationFilter.java,v 1.7 2008-08-14 17:12:21 pandyas Exp $ and $log:$
*
*
*/
package gov.nih.nci.camod.util;
import java.io.IOException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.*;
import javax.servlet.http.*;
public class AuthenticationFilter implements Filter {
private FilterConfig filterConfig;
/**
* Called by the web container to indicate to a filter that it is being
* placed into service.
*/
public void init(FilterConfig filterConfig) throws ServletException {
//System.out.println("AuthenticationFilter.init");
this.filterConfig = filterConfig;
}
/**
* The doFilter method of the Filter is called by the web container each time a
* request/response pair is passed through the chain due to a client request
* for a resource at the end of the chain.
*/
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException,
ServletException {
//System.out.println("AuthenticationFilter.doFilter");
HttpServletRequest req = (HttpServletRequest) request;
HttpServletResponse res = (HttpServletResponse) response;
System.out.println("req.getServletPath()= " + req.getServletPath());
System.out.println("Enter doFilter req.getSession().getId()= " + req.getSession().getId());
System.out.println("Enter doFilter notloggedin= " + (String)req.getSession().getAttribute("notloggedin"));
boolean authorized = false;
String isloginpage = ((HttpServletRequest) request).getRequestURI();
//System.out.println("AuthenticationFilter.doFilter isloginpage= " + isloginpage);
boolean isRequestedSessionIdFromURL = ((HttpServletRequest) request).isRequestedSessionIdFromURL();
//System.out.println("AuthenticationFilter.doFilter isRequestedSessionIdFromURL= " + isRequestedSessionIdFromURL);
if (request instanceof HttpServletRequest) {
if(isloginpage!=null && !isRequestedSessionIdFromURL &&(
isloginpage.endsWith("loginMain.do") ||
isloginpage.endsWith("LoginAction.do") ||
isloginpage.endsWith("/camod/LoginAction.do")
)) {
System.out.println("AuthenticationFilter.doFilter loginMain.do,LoginAction.do,/camod/LoginAction.do loop ");
//just continue, so they can login
generateNewSession((HttpServletRequest) request);
chain.doFilter(request, response);
return;
}
//System.out.println("AuthenticationFilter.doFilter NOT login.do or loginMain.do ");
//check login for caMOD
HttpSession session = ((HttpServletRequest) request).getSession(false);
//System.out.println("AuthenticationFilter.doFilter session= " + session);
if (session != null && !isRequestedSessionIdFromURL){
String loggedin = (String)session.getAttribute("loggedin");
//System.out.println("AuthenticationFilter loggedin= " + loggedin);
// reverse this property in application when this code works
if(loggedin != null && loggedin.equals("true")){
System.out.println("AuthenticationFilter set authorized = true: " );
authorized = true;
}
}
}
if (authorized) {
System.out.println("AuthenticationFilter.doFilter authorized loop");
chain.doFilter(request, response);
return;
} else if (filterConfig != null) {
            // redirect to login.jsp from any unauthorized pages (external bookmarks to secure pages, etc.)
String unauthorizedPage = filterConfig.getInitParameter("unauthorizedPage");
System.out.println("AuthenticationFilter.doFilter not authorized loop unauthorizedPage= " + unauthorizedPage);
if (unauthorizedPage != null && !"".equals(unauthorizedPage)) {
//System.out.println("unauthorizedPage != null && !.equals(unauthorizedPage) loop: ");
generateNewSession((HttpServletRequest) request);
System.out.println("Generated new session for request ");
//chain.doFilter(request, response);
chain.doFilter(request, response);
return;
}
}
}
private void generateNewSession(HttpServletRequest httpRequest){
System.out.println("AuthenticationFilter generateNewSession enter");
HttpSession session = httpRequest.getSession();
HashMap<String, Object> old = new HashMap<String, Object>();
Enumeration<String> keys = (Enumeration<String>) session.getAttributeNames();
while (keys.hasMoreElements()) {
String key = keys.nextElement();
old.put(key, session.getAttribute(key));
}
//session invalidated
session.invalidate();
// get new session
session = httpRequest.getSession(true);
for (Map.Entry<String, Object> entry : old.entrySet()) {
session.setAttribute(entry.getKey(), entry.getValue());
}
System.out.println("AuthenticationFilter generateNewSession exit");
}
/**
* Called by the web container to indicate to a filter that it is being
* taken out of service.
*/
public void destroy() {
filterConfig = null;
}
}
| software/camod/src/gov/nih/nci/camod/util/AuthenticationFilter.java | /**
*
* $Id: AuthenticationFilter.java,v 1.7 2008-08-14 17:12:21 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.6 2006/11/09 17:33:19 pandyas
* Commented out debug code
*
* Revision 1.5 2006/04/17 19:10:50 pandyas
* Added $Id: AuthenticationFilter.java,v 1.7 2008-08-14 17:12:21 pandyas Exp $ and $log:$
*
*
*/
package gov.nih.nci.camod.util;
import java.io.IOException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.*;
import javax.servlet.http.*;
public class AuthenticationFilter implements Filter {
private FilterConfig filterConfig;
/**
* Called by the web container to indicate to a filter that it is being
* placed into service.
*/
public void init(FilterConfig filterConfig) throws ServletException {
//System.out.println("AuthenticationFilter.init");
this.filterConfig = filterConfig;
}
/**
* The doFilter method of the Filter is called by the web container each time a
* request/response pair is passed through the chain due to a client request
* for a resource at the end of the chain.
*/
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException,
ServletException {
//System.out.println("AuthenticationFilter.doFilter");
HttpServletRequest req = (HttpServletRequest) request;
HttpServletResponse res = (HttpServletResponse) response;
System.out.println("req.getServletPath()= " + req.getServletPath());
System.out.println("Enter doFilter req.getSession().getId()= " + req.getSession().getId());
System.out.println("Enter doFilter notloggedin= " + (String)req.getSession().getAttribute("notloggedin"));
boolean authorized = false;
String isloginpage = ((HttpServletRequest) request).getRequestURI();
//System.out.println("AuthenticationFilter.doFilter isloginpage= " + isloginpage);
boolean isRequestedSessionIdFromURL = ((HttpServletRequest) request).isRequestedSessionIdFromURL();
//System.out.println("AuthenticationFilter.doFilter isRequestedSessionIdFromURL= " + isRequestedSessionIdFromURL);
if (request instanceof HttpServletRequest) {
if(isloginpage!=null && !isRequestedSessionIdFromURL &&(
isloginpage.endsWith("loginMain.do") ||
isloginpage.endsWith("LoginAction.do") ||
isloginpage.endsWith("/camod/LoginAction.do")
)) {
System.out.println("AuthenticationFilter.doFilter loginMain.do,LoginAction.do,/camod/LoginAction.do loop ");
//just continue, so they can login
generateNewSession((HttpServletRequest) request);
chain.doFilter(request, response);
return;
}
//System.out.println("AuthenticationFilter.doFilter NOT login.do or loginMain.do ");
//check login for caMOD
HttpSession session = ((HttpServletRequest) request).getSession(false);
//System.out.println("AuthenticationFilter.doFilter session= " + session);
if (session != null && !isRequestedSessionIdFromURL){
String loggedin = (String)session.getAttribute("loggedin");
//System.out.println("AuthenticationFilter loggedin= " + loggedin);
// reverse this property in application when this code works
if(loggedin != null && loggedin.equals("true")){
//System.out.println("AuthenticationFilter set authorized = true: " );
authorized = true;
}
}
}
if (authorized) {
System.out.println("AuthenticationFilter.doFilter authorized loop");
chain.doFilter(request, response);
return;
} else if (filterConfig != null) {
            // redirect to login.jsp from any unauthorized pages (external bookmarks to secure pages, etc.)
String unauthorizedPage = filterConfig.getInitParameter("unauthorizedPage");
System.out.println("AuthenticationFilter.doFilter not authorized loop unauthorizedPage= " + unauthorizedPage);
if (unauthorizedPage != null && !"".equals(unauthorizedPage)) {
//System.out.println("unauthorizedPage != null && !.equals(unauthorizedPage) loop: ");
generateNewSession((HttpServletRequest) request);
System.out.println("Generated new session for request ");
//chain.doFilter(request, response);
chain.doFilter(request, response);
return;
}
}
}
private void generateNewSession(HttpServletRequest httpRequest){
System.out.println("AuthenticationFilter generateNewSession enter");
HttpSession session = httpRequest.getSession();
HashMap<String, Object> old = new HashMap<String, Object>();
Enumeration<String> keys = (Enumeration<String>) session.getAttributeNames();
while (keys.hasMoreElements()) {
String key = keys.nextElement();
old.put(key, session.getAttribute(key));
}
//session invalidated
session.invalidate();
// get new session
session = httpRequest.getSession(true);
for (Map.Entry<String, Object> entry : old.entrySet()) {
session.setAttribute(entry.getKey(), entry.getValue());
}
System.out.println("AuthenticationFilter generateNewSession exit");
}
/**
* Called by the web container to indicate to a filter that it is being
* taken out of service.
*/
public void destroy() {
filterConfig = null;
}
}
| enabled debugging for dev - app scan changes
SVN-Revision: 5261
| software/camod/src/gov/nih/nci/camod/util/AuthenticationFilter.java | enabled debugging for dev - app scan changes | <ide><path>oftware/camod/src/gov/nih/nci/camod/util/AuthenticationFilter.java
<ide> //System.out.println("AuthenticationFilter loggedin= " + loggedin);
<ide> // reverse this property in application when this code works
<ide> if(loggedin != null && loggedin.equals("true")){
<del> //System.out.println("AuthenticationFilter set authorized = true: " );
<add> System.out.println("AuthenticationFilter set authorized = true: " );
<ide> authorized = true;
<ide> }
<ide> } |
|
Java | bsd-3-clause | 5a27ea9e8745955c1792e7769a05c0c782ac8360 | 0 | fujunwei/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,dednal/chromium.src,dednal/chromium.src,M4sse/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,Just-D/chromium-1,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,dednal/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,Chille
dheart/chromium,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,dednal/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chromecast.shell;
import org.chromium.base.PathUtils;
import org.chromium.base.ResourceExtractor;
import org.chromium.content.app.ContentApplication;
/**
* Entry point for the Android cast shell application. Handles initialization of information that
* needs to be shared across the main activity and the child services created.
*
* Note that this gets run for each process, including sandboxed child render processes. Child
* processes don't need most of the full "setup" performed in CastBrowserHelper.java, but they do
* require a few basic pieces (found here).
*/
public class CastApplication extends ContentApplication {
private static final String[] MANDATORY_PAK_FILES = new String[] {"cast_shell.pak"};
private static final String PRIVATE_DATA_DIRECTORY_SUFFIX = "cast_shell";
@Override
public void onCreate() {
super.onCreate();
initializeApplicationParameters();
}
public static void initializeApplicationParameters() {
ResourceExtractor.setMandatoryPaksToExtract(MANDATORY_PAK_FILES);
PathUtils.setPrivateDataDirectorySuffix(PRIVATE_DATA_DIRECTORY_SUFFIX);
}
}
| chromecast/shell/android/apk/src/org/chromium/chromecast/shell/CastApplication.java | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chromecast.shell;
import org.chromium.base.PathUtils;
import org.chromium.content.app.ContentApplication;
import org.chromium.content.browser.ResourceExtractor;
/**
* Entry point for the Android cast shell application. Handles initialization of information that
* needs to be shared across the main activity and the child services created.
*
* Note that this gets run for each process, including sandboxed child render processes. Child
* processes don't need most of the full "setup" performed in CastBrowserHelper.java, but they do
* require a few basic pieces (found here).
*/
public class CastApplication extends ContentApplication {
private static final String[] MANDATORY_PAK_FILES = new String[] {"cast_shell.pak"};
private static final String PRIVATE_DATA_DIRECTORY_SUFFIX = "cast_shell";
@Override
public void onCreate() {
super.onCreate();
initializeApplicationParameters();
}
public static void initializeApplicationParameters() {
ResourceExtractor.setMandatoryPaksToExtract(MANDATORY_PAK_FILES);
PathUtils.setPrivateDataDirectorySuffix(PRIVATE_DATA_DIRECTORY_SUFFIX);
}
}
| Android buildfix: ResourceExtractor moved to org.chromium.base
[email protected],[email protected]
BUG=None
Review URL: https://codereview.chromium.org/563323003
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#294698}
| chromecast/shell/android/apk/src/org/chromium/chromecast/shell/CastApplication.java | Android buildfix: ResourceExtractor moved to org.chromium.base | <ide><path>hromecast/shell/android/apk/src/org/chromium/chromecast/shell/CastApplication.java
<ide> package org.chromium.chromecast.shell;
<ide>
<ide> import org.chromium.base.PathUtils;
<add>import org.chromium.base.ResourceExtractor;
<ide> import org.chromium.content.app.ContentApplication;
<del>import org.chromium.content.browser.ResourceExtractor;
<ide>
<ide> /**
<ide> * Entry point for the Android cast shell application. Handles initialization of information that |
|
Java | apache-2.0 | ffa36f086dcce46aa72836314655cb2c526597a3 | 0 | petrushy/Orekit,CS-SI/Orekit,petrushy/Orekit,CS-SI/Orekit | /* Copyright 2002-2019 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.estimation.measurements.gnss;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.hipparchus.Field;
import org.hipparchus.analysis.differentiation.DSFactory;
import org.hipparchus.analysis.differentiation.DerivativeStructure;
import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
import org.orekit.estimation.measurements.AbstractMeasurement;
import org.orekit.estimation.measurements.EstimatedMeasurement;
import org.orekit.estimation.measurements.GroundStation;
import org.orekit.estimation.measurements.ObservableSatellite;
import org.orekit.frames.FieldTransform;
import org.orekit.propagation.SpacecraftState;
import org.orekit.time.AbsoluteDate;
import org.orekit.time.FieldAbsoluteDate;
import org.orekit.utils.Constants;
import org.orekit.utils.ParameterDriver;
import org.orekit.utils.TimeStampedFieldPVCoordinates;
import org.orekit.utils.TimeStampedPVCoordinates;
/** Class modeling a phase measurement from a ground station.
* <p>
* The measurement is considered to be a signal emitted from
* a spacecraft and received on a ground station.
* Its value is the number of cycles between emission and
* reception. The motion of both the station and the
 * spacecraft during the signal flight time is taken into
* account. The date of the measurement corresponds to the
* reception on ground of the emitted signal.
* </p>
* @author Thierry Ceolin
* @author Luc Maisonobe
* @author Maxime Journot
* @since 9.2
*/
public class Phase extends AbstractMeasurement<Phase> {
/** Name for ambiguity driver. */
public static final String AMBIGUITY_NAME = "ambiguity";
/** Driver for ambiguity. */
private final ParameterDriver ambiguityDriver;
/** Ground station from which measurement is performed. */
private final GroundStation station;
/** Wavelength of the phase observed value [m]. */
private final double wavelength;
/** Simple constructor.
* @param station ground station from which measurement is performed
* @param date date of the measurement
* @param phase observed value (cycles)
* @param wavelength phase observed value wavelength (m)
* @param sigma theoretical standard deviation
* @param baseWeight base weight
* @param satellite satellite related to this measurement
* @since 9.3
*/
public Phase(final GroundStation station, final AbsoluteDate date,
final double phase, final double wavelength, final double sigma,
final double baseWeight, final ObservableSatellite satellite) {
super(date, phase, sigma, baseWeight, Arrays.asList(satellite));
ambiguityDriver = new ParameterDriver(AMBIGUITY_NAME,
0.0, 1.0,
Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
addParameterDriver(ambiguityDriver);
addParameterDriver(station.getClockOffsetDriver());
addParameterDriver(station.getEastOffsetDriver());
addParameterDriver(station.getNorthOffsetDriver());
addParameterDriver(station.getZenithOffsetDriver());
addParameterDriver(station.getPrimeMeridianOffsetDriver());
addParameterDriver(station.getPrimeMeridianDriftDriver());
addParameterDriver(station.getPolarOffsetXDriver());
addParameterDriver(station.getPolarDriftXDriver());
addParameterDriver(station.getPolarOffsetYDriver());
addParameterDriver(station.getPolarDriftYDriver());
this.station = station;
this.wavelength = wavelength;
}
/** Get the ground station from which measurement is performed.
* @return ground station from which measurement is performed
*/
public GroundStation getStation() {
return station;
}
/** Get the wavelength.
* @return wavelength (m)
*/
public double getWavelength() {
return wavelength;
}
/** {@inheritDoc} */
@Override
protected EstimatedMeasurement<Phase> theoreticalEvaluation(final int iteration,
final int evaluation,
final SpacecraftState[] states) {
final ObservableSatellite satellite = getSatellites().get(0);
final SpacecraftState state = states[satellite.getPropagatorIndex()];
// Phase derivatives are computed with respect to spacecraft state in inertial frame
// and station parameters
// ----------------------
//
// Parameters:
// - 0..2 - Position of the spacecraft in inertial frame
// - 3..5 - Velocity of the spacecraft in inertial frame
// - 6..n - station parameters (ambiguity, clock offset, station offsets, pole, prime meridian...)
int nbParams = 6;
final Map<String, Integer> indices = new HashMap<>();
for (ParameterDriver driver : getParametersDrivers()) {
if (driver.isSelected()) {
indices.put(driver.getName(), nbParams++);
}
}
final DSFactory factory = new DSFactory(nbParams, 1);
final Field<DerivativeStructure> field = factory.getDerivativeField();
final FieldVector3D<DerivativeStructure> zero = FieldVector3D.getZero(field);
// Coordinates of the spacecraft expressed as a derivative structure
final TimeStampedFieldPVCoordinates<DerivativeStructure> pvaDS = getCoordinates(state, 0, factory);
// transform between station and inertial frame, expressed as a derivative structure
// The components of station's position in offset frame are the 3 last derivative parameters
final FieldTransform<DerivativeStructure> offsetToInertialDownlink =
station.getOffsetToInertial(state.getFrame(), getDate(), factory, indices);
final FieldAbsoluteDate<DerivativeStructure> downlinkDateDS =
offsetToInertialDownlink.getFieldDate();
// Station position in inertial frame at end of the downlink leg
final TimeStampedFieldPVCoordinates<DerivativeStructure> stationDownlink =
offsetToInertialDownlink.transformPVCoordinates(new TimeStampedFieldPVCoordinates<>(downlinkDateDS,
zero, zero, zero));
// Compute propagation times
// (if state has already been set up to pre-compensate propagation delay,
// we will have delta == tauD and transitState will be the same as state)
// Downlink delay
final DerivativeStructure tauD = signalTimeOfFlight(pvaDS, stationDownlink.getPosition(), downlinkDateDS);
// Transit state & Transit state (re)computed with derivative structures
final DerivativeStructure delta = downlinkDateDS.durationFrom(state.getDate());
final DerivativeStructure deltaMTauD = tauD.negate().add(delta);
final SpacecraftState transitState = state.shiftedBy(deltaMTauD.getValue());
final TimeStampedFieldPVCoordinates<DerivativeStructure> transitStateDS = pvaDS.shiftedBy(deltaMTauD);
// prepare the evaluation
final EstimatedMeasurement<Phase> estimated =
new EstimatedMeasurement<Phase>(this, iteration, evaluation,
new SpacecraftState[] {
transitState
}, new TimeStampedPVCoordinates[] {
transitStateDS.toTimeStampedPVCoordinates(),
stationDownlink.toTimeStampedPVCoordinates()
});
// Phase value
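        // phase (in cycles) = downlink time of flight * c / wavelength + ambiguity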
final double cOverLambda = Constants.SPEED_OF_LIGHT / wavelength;
final DerivativeStructure ambiguity = ambiguityDriver.getValue(factory, indices);
final DerivativeStructure phase = tauD.multiply(cOverLambda).add(ambiguity);
estimated.setEstimatedValue(phase.getValue());
// Phase partial derivatives with respect to state
final double[] derivatives = phase.getAllDerivatives();
estimated.setStateDerivatives(0, Arrays.copyOfRange(derivatives, 1, 7));
// set partial derivatives with respect to parameters
// (beware element at index 0 is the value, not a derivative)
for (final ParameterDriver driver : getParametersDrivers()) {
final Integer index = indices.get(driver.getName());
if (index != null) {
estimated.setParameterDerivatives(driver, derivatives[index + 1]);
}
}
return estimated;
}
}
| src/main/java/org/orekit/estimation/measurements/gnss/Phase.java | /* Copyright 2002-2019 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.estimation.measurements.gnss;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.hipparchus.Field;
import org.hipparchus.analysis.differentiation.DSFactory;
import org.hipparchus.analysis.differentiation.DerivativeStructure;
import org.hipparchus.geometry.euclidean.threed.FieldVector3D;
import org.orekit.estimation.measurements.AbstractMeasurement;
import org.orekit.estimation.measurements.EstimatedMeasurement;
import org.orekit.estimation.measurements.GroundStation;
import org.orekit.estimation.measurements.ObservableSatellite;
import org.orekit.frames.FieldTransform;
import org.orekit.propagation.SpacecraftState;
import org.orekit.time.AbsoluteDate;
import org.orekit.time.FieldAbsoluteDate;
import org.orekit.utils.Constants;
import org.orekit.utils.ParameterDriver;
import org.orekit.utils.TimeStampedFieldPVCoordinates;
import org.orekit.utils.TimeStampedPVCoordinates;
/** Class modeling a phase measurement from a ground station.
* <p>
* The measurement is considered to be a signal emitted from
* a spacecraft and received on a ground station.
* Its value is the number of cycles between emission and
* reception. The motion of both the station and the
* spacecraft during the signal flight time are taken into
* account. The date of the measurement corresponds to the
* reception on ground of the emitted signal.
* </p>
* @author Thierry Ceolin
* @author Luc Maisonobe
* @author Maxime Journot
* @since 9.2
*/
public class Phase extends AbstractMeasurement<Phase> {
/** Name for ambiguity driver. */
public static final String AMBIGUITY_NAME = "ambiguity";
/** Driver for ambiguity. */
private final ParameterDriver ambiguityDriver;
/** Ground station from which measurement is performed. */
private final GroundStation station;
/** Wavelength of the phase observed value [m]. */
private final double wavelength;
/** Simple constructor.
* @param station ground station from which measurement is performed
* @param date date of the measurement
* @param phase observed value
* @param wavelength phase observed value wavelength (m)
* @param sigma theoretical standard deviation
* @param baseWeight base weight
* @param satellite satellite related to this measurement
* @since 9.3
*/
public Phase(final GroundStation station, final AbsoluteDate date,
final double phase, final double wavelength, final double sigma,
final double baseWeight, final ObservableSatellite satellite) {
super(date, phase, sigma, baseWeight, Arrays.asList(satellite));
ambiguityDriver = new ParameterDriver(AMBIGUITY_NAME,
0.0, 1.0,
Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
addParameterDriver(ambiguityDriver);
addParameterDriver(station.getClockOffsetDriver());
addParameterDriver(station.getEastOffsetDriver());
addParameterDriver(station.getNorthOffsetDriver());
addParameterDriver(station.getZenithOffsetDriver());
addParameterDriver(station.getPrimeMeridianOffsetDriver());
addParameterDriver(station.getPrimeMeridianDriftDriver());
addParameterDriver(station.getPolarOffsetXDriver());
addParameterDriver(station.getPolarDriftXDriver());
addParameterDriver(station.getPolarOffsetYDriver());
addParameterDriver(station.getPolarDriftYDriver());
this.station = station;
this.wavelength = wavelength;
}
/** Get the ground station from which measurement is performed.
* @return ground station from which measurement is performed
*/
public GroundStation getStation() {
return station;
}
/** Get the wavelength.
* @return wavelength (m)
*/
public double getWavelength() {
return wavelength;
}
/** {@inheritDoc} */
@Override
protected EstimatedMeasurement<Phase> theoreticalEvaluation(final int iteration,
final int evaluation,
final SpacecraftState[] states) {
final ObservableSatellite satellite = getSatellites().get(0);
final SpacecraftState state = states[satellite.getPropagatorIndex()];
// Phase derivatives are computed with respect to spacecraft state in inertial frame
// and station parameters
// ----------------------
//
// Parameters:
// - 0..2 - Position of the spacecraft in inertial frame
// - 3..5 - Velocity of the spacecraft in inertial frame
// - 6..n - station parameters (ambiguity, clock offset, station offsets, pole, prime meridian...)
int nbParams = 6;
final Map<String, Integer> indices = new HashMap<>();
for (ParameterDriver driver : getParametersDrivers()) {
if (driver.isSelected()) {
indices.put(driver.getName(), nbParams++);
}
}
final DSFactory factory = new DSFactory(nbParams, 1);
final Field<DerivativeStructure> field = factory.getDerivativeField();
final FieldVector3D<DerivativeStructure> zero = FieldVector3D.getZero(field);
// Coordinates of the spacecraft expressed as a derivative structure
final TimeStampedFieldPVCoordinates<DerivativeStructure> pvaDS = getCoordinates(state, 0, factory);
// transform between station and inertial frame, expressed as a derivative structure
// The components of station's position in offset frame are the 3 last derivative parameters
final FieldTransform<DerivativeStructure> offsetToInertialDownlink =
station.getOffsetToInertial(state.getFrame(), getDate(), factory, indices);
final FieldAbsoluteDate<DerivativeStructure> downlinkDateDS =
offsetToInertialDownlink.getFieldDate();
// Station position in inertial frame at end of the downlink leg
final TimeStampedFieldPVCoordinates<DerivativeStructure> stationDownlink =
offsetToInertialDownlink.transformPVCoordinates(new TimeStampedFieldPVCoordinates<>(downlinkDateDS,
zero, zero, zero));
// Compute propagation times
// (if state has already been set up to pre-compensate propagation delay,
// we will have delta == tauD and transitState will be the same as state)
// Downlink delay
final DerivativeStructure tauD = signalTimeOfFlight(pvaDS, stationDownlink.getPosition(), downlinkDateDS);
// Transit state & Transit state (re)computed with derivative structures
final DerivativeStructure delta = downlinkDateDS.durationFrom(state.getDate());
final DerivativeStructure deltaMTauD = tauD.negate().add(delta);
final SpacecraftState transitState = state.shiftedBy(deltaMTauD.getValue());
final TimeStampedFieldPVCoordinates<DerivativeStructure> transitStateDS = pvaDS.shiftedBy(deltaMTauD);
// prepare the evaluation
final EstimatedMeasurement<Phase> estimated =
new EstimatedMeasurement<Phase>(this, iteration, evaluation,
new SpacecraftState[] {
transitState
}, new TimeStampedPVCoordinates[] {
transitStateDS.toTimeStampedPVCoordinates(),
stationDownlink.toTimeStampedPVCoordinates()
});
// Phase value
final double cOverLambda = Constants.SPEED_OF_LIGHT / wavelength;
final DerivativeStructure ambiguity = ambiguityDriver.getValue(factory, indices);
final DerivativeStructure phase = tauD.multiply(cOverLambda).add(ambiguity);
estimated.setEstimatedValue(phase.getValue());
// Phase partial derivatives with respect to state
final double[] derivatives = phase.getAllDerivatives();
estimated.setStateDerivatives(0, Arrays.copyOfRange(derivatives, 1, 7));
// set partial derivatives with respect to parameters
// (beware element at index 0 is the value, not a derivative)
for (final ParameterDriver driver : getParametersDrivers()) {
final Integer index = indices.get(driver.getName());
if (index != null) {
estimated.setParameterDerivatives(driver, derivatives[index + 1]);
}
}
return estimated;
}
}
| Improved javadoc. | src/main/java/org/orekit/estimation/measurements/gnss/Phase.java | Improved javadoc. | <ide><path>rc/main/java/org/orekit/estimation/measurements/gnss/Phase.java
<ide> /** Simple constructor.
<ide> * @param station ground station from which measurement is performed
<ide> * @param date date of the measurement
<del> * @param phase observed value
<add> * @param phase observed value (cycles)
<ide> * @param wavelength phase observed value wavelength (m)
<ide> * @param sigma theoretical standard deviation
<ide> * @param baseWeight base weight |
|
Java | cc0-1.0 | 4664704f2855c4152ced63d82b27ac211da5b370 | 0 | C4K3/Misc | package net.simpvp.Misc;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.OfflinePlayer;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.Bukkit;
/**
* A command to let /everybody/ see when somebody first joined.
*/
public class FirstJoinedCommand implements CommandExecutor {
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
Player player = null;
if (sender instanceof Player)
player = (Player) sender;
if (args.length < 1 && player == null) {
Misc.instance.getLogger().info("You must specify a player to use this command.");
return true;
}
Player target;
if (args.length > 0) {
@SuppressWarnings("deprecation") /* Only used for online players */
Player tmp = Misc.instance.getServer().getPlayer(args[0]);
target = tmp;
} else {
target = player;
}
final UUID uuid;
if (player == null) {
uuid = null;
} else {
uuid = player.getUniqueId();
}
if (target == null) {
get_uuid(args[0], uuid);
} else {
send_result(uuid, target.getUniqueId(), target.getName());
}
return true;
}
private void get_uuid(final String name, final UUID caller_uuid) {
new BukkitRunnable() {
@Override
public void run() {
try {
URL url = new URL("https://api.mojang.com/users/profiles/minecraft/" + name);
URLConnection conn = url.openConnection();
conn.connect();
BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
String resp = reader.readLine();
if (resp == null)
resp = "";
Pattern p = Pattern.compile("\"id\":\"(\\S+?)\"");
Matcher m = p.matcher(resp);
m.find();
String uuid_str = m.group(1);
uuid_str = uuid_str.replaceAll("(\\w{8})(\\w{4})(\\w{4})(\\w{4})(\\w{12})", "$1-$2-$3-$4-$5");
final UUID uuid = UUID.fromString(uuid_str);
new BukkitRunnable() {
@Override
public void run() {
send_result(caller_uuid, uuid, name);
}
}.runTask(Misc.instance);
} catch (Exception e) {
new BukkitRunnable() {
@Override
public void run() {
send_result(caller_uuid, null, name);
}
}.runTask(Misc.instance);
}
}
}.runTaskAsynchronously(Misc.instance);
}
private void send_result(UUID uuid, UUID target_uuid, String target_name) {
String msg;
if (target_uuid == null) {
msg = "Error retrieving data of " + target_name + ".";
} else {
OfflinePlayer off_player = Misc.instance.getServer().getOfflinePlayer(target_uuid);
if (off_player.getFirstPlayed() == 0) {
msg = target_name + " has never played on this server.";
} else {
long days = ((System.currentTimeMillis() - off_player.getFirstPlayed())
/ (1000L * 60 * 60 * 24));
SimpleDateFormat sdf = new SimpleDateFormat("d MMMM yyyy");
msg = off_player.getName() + " first joined on "
+ ChatColor.AQUA + sdf.format(new Date(off_player.getFirstPlayed()))
+ ChatColor.GREEN + ", " + days + " day";
if (days != 1)
msg += "s";
msg += " ago. Note that for people who joined before 2012,"
+ " this command may show a later date than the true date.";
}
}
Player player = Misc.instance.getServer().getPlayer(uuid);
if (player == null) {
Misc.instance.getLogger().info(msg);
} else {
player.sendMessage(ChatColor.GREEN + msg);
}
}
}
| src/FirstJoinedCommand.java | package net.simpvp.Misc;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
/**
* A command to let /everybody/ see when somebody first joined.
*/
public class FirstJoinedCommand implements CommandExecutor {
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
Player player = null;
if (sender instanceof Player)
player = (Player) sender;
if (args.length < 1 && player == null) {
Misc.instance.getLogger().info("You must specify a player to use this command.");
return true;
}
Player target;
if (args.length > 0) {
@SuppressWarnings("deprecation") /* Only used for online players */
Player tmp = Misc.instance.getServer().getPlayer(args[0]);
target = tmp;
} else {
target = player;
}
if (target == null) {
if (player == null) {
Misc.instance.getLogger().info("Could not find the specified player.");
} else {
player.sendMessage(ChatColor.RED + "Could not find the specified player.");
}
return true;
}
long days = ((System.currentTimeMillis() - target.getFirstPlayed()) / (1000L * 60 * 60 * 24));
SimpleDateFormat sdf = new SimpleDateFormat("d MMMM yyyy");
String msg = target.getName() + " first joined on "
+ ChatColor.AQUA + sdf.format(new Date(target.getFirstPlayed()))
+ ChatColor.GREEN + ", " + days + " day";
if (days != 1)
msg += "s";
msg += " ago. Note that for people who joined before 2012,"
+ " this command may show a later date than the true date.";
if (player == null) {
Misc.instance.getLogger().info(msg);
} else {
player.sendMessage(ChatColor.GREEN + msg);
}
return true;
}
}
| Allow people to use /firstjoined on offline players
| src/FirstJoinedCommand.java | Allow people to use /firstjoined on offline players | <ide><path>rc/FirstJoinedCommand.java
<ide>
<ide> import java.text.SimpleDateFormat;
<ide> import java.util.Date;
<add>import java.util.UUID;
<add>import java.io.BufferedReader;
<add>import java.io.InputStreamReader;
<add>import java.net.URL;
<add>import java.net.URLConnection;
<add>import java.util.regex.Pattern;
<add>import java.util.regex.Matcher;
<ide>
<ide> import org.bukkit.ChatColor;
<ide> import org.bukkit.command.Command;
<ide> import org.bukkit.command.CommandExecutor;
<ide> import org.bukkit.command.CommandSender;
<ide> import org.bukkit.entity.Player;
<add>import org.bukkit.OfflinePlayer;
<add>import org.bukkit.scheduler.BukkitRunnable;
<add>import org.bukkit.Bukkit;
<ide>
<ide> /**
<ide> * A command to let /everybody/ see when somebody first joined.
<ide> target = player;
<ide> }
<ide>
<del> if (target == null) {
<del> if (player == null) {
<del> Misc.instance.getLogger().info("Could not find the specified player.");
<del> } else {
<del> player.sendMessage(ChatColor.RED + "Could not find the specified player.");
<del> }
<del>
<del> return true;
<add> final UUID uuid;
<add> if (player == null) {
<add> uuid = null;
<add> } else {
<add> uuid = player.getUniqueId();
<ide> }
<ide>
<del> long days = ((System.currentTimeMillis() - target.getFirstPlayed()) / (1000L * 60 * 60 * 24));
<del> SimpleDateFormat sdf = new SimpleDateFormat("d MMMM yyyy");
<del> String msg = target.getName() + " first joined on "
<del> + ChatColor.AQUA + sdf.format(new Date(target.getFirstPlayed()))
<del> + ChatColor.GREEN + ", " + days + " day";
<del> if (days != 1)
<del> msg += "s";
<del> msg += " ago. Note that for people who joined before 2012,"
<del> + " this command may show a later date than the true date.";
<add> if (target == null) {
<add> get_uuid(args[0], uuid);
<add>
<add> } else {
<add> send_result(uuid, target.getUniqueId(), target.getName());
<add> }
<add>
<add> return true;
<add> }
<add>
<add> private void get_uuid(final String name, final UUID caller_uuid) {
<add> new BukkitRunnable() {
<add> @Override
<add> public void run() {
<add> try {
<add> URL url = new URL("https://api.mojang.com/users/profiles/minecraft/" + name);
<add> URLConnection conn = url.openConnection();
<add> conn.connect();
<add> BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
<add> String resp = reader.readLine();
<add> if (resp == null)
<add> resp = "";
<add>
<add> Pattern p = Pattern.compile("\"id\":\"(\\S+?)\"");
<add> Matcher m = p.matcher(resp);
<add> m.find();
<add> String uuid_str = m.group(1);
<add> uuid_str = uuid_str.replaceAll("(\\w{8})(\\w{4})(\\w{4})(\\w{4})(\\w{12})", "$1-$2-$3-$4-$5");
<add>
<add> final UUID uuid = UUID.fromString(uuid_str);
<add> new BukkitRunnable() {
<add> @Override
<add> public void run() {
<add> send_result(caller_uuid, uuid, name);
<add> }
<add> }.runTask(Misc.instance);
<add>
<add> } catch (Exception e) {
<add> new BukkitRunnable() {
<add> @Override
<add> public void run() {
<add> send_result(caller_uuid, null, name);
<add> }
<add> }.runTask(Misc.instance);
<add> }
<add>
<add>
<add> }
<add> }.runTaskAsynchronously(Misc.instance);
<add> }
<add>
<add> private void send_result(UUID uuid, UUID target_uuid, String target_name) {
<add> String msg;
<add> if (target_uuid == null) {
<add> msg = "Error retrieving data of " + target_name + ".";
<add> } else {
<add> OfflinePlayer off_player = Misc.instance.getServer().getOfflinePlayer(target_uuid);
<add> if (off_player.getFirstPlayed() == 0) {
<add> msg = target_name + " has never played on this server.";
<add> } else {
<add>
<add> long days = ((System.currentTimeMillis() - off_player.getFirstPlayed())
<add> / (1000L * 60 * 60 * 24));
<add> SimpleDateFormat sdf = new SimpleDateFormat("d MMMM yyyy");
<add>
<add> msg = off_player.getName() + " first joined on "
<add> + ChatColor.AQUA + sdf.format(new Date(off_player.getFirstPlayed()))
<add> + ChatColor.GREEN + ", " + days + " day";
<add> if (days != 1)
<add> msg += "s";
<add> msg += " ago. Note that for people who joined before 2012,"
<add> + " this command may show a later date than the true date.";
<add> }
<add> }
<add>
<add> Player player = Misc.instance.getServer().getPlayer(uuid);
<ide>
<ide> if (player == null) {
<ide> Misc.instance.getLogger().info(msg);
<ide> player.sendMessage(ChatColor.GREEN + msg);
<ide> }
<ide>
<del> return true;
<ide> }
<ide>
<ide> } |
|
Java | apache-2.0 | 2c305c91744ff689fd4d69ecb50e6ebfb7f40948 | 0 | googleinterns/heartrate-bt-wear | package com.google.heartrate.wearos.app.bluetooth.server;
import android.bluetooth.le.AdvertiseCallback;
import android.bluetooth.le.AdvertiseData;
import android.bluetooth.le.AdvertiseSettings;
import android.bluetooth.le.BluetoothLeAdvertiser;
import android.content.Context;
import android.os.ParcelUuid;
import android.util.Log;
import com.google.heartrate.wearos.app.bluetooth.BluetoothUtils;
import com.google.heartrate.wearos.app.gatt.GattException;
import java.util.Set;
import java.util.UUID;
/**
* {@link BluetoothAdvertiser} is the wrapper over {@link BluetoothLeAdvertiser} class.
* <p>
* Advertising allows devices to broadcast information defining their intentions.
* Server uses advertising to allow clients to find it and, subsequently, to establish a connection with it.
* <p>
* A server can use {@link BluetoothAdvertiser} to advertise its existence and show it's complete or partial
* list of GATT services it offers.
*
*/
public class BluetoothAdvertiser {
private static final String TAG = BluetoothAdvertiser.class.getSimpleName();
/** {@link BluetoothAdvertiser} for advertising process. */
private final BluetoothLeAdvertiser bluetoothLeAdvertiser;
/** {@link AdvertiseCallback} to receive async responses from {@link BluetoothAdvertiser}. */
final AdvertiseCallback advertiseCallback = new AdvertiseCallback() {
@Override
public void onStartSuccess(AdvertiseSettings settingsInEffect) {
Log.i(TAG, "BluetoothLE Advertise Started.");
}
@Override
public void onStartFailure(int errorCode) {
Log.e(TAG, String.format("BluetoothLE Advertise Failed: %d", + errorCode));
}
};
public BluetoothAdvertiser(Context context) throws GattException {
bluetoothLeAdvertiser = BluetoothUtils.getBluetoothLeAdvertiser(context);
}
/**
* Start advertising to clients about server existence.
*
* @param serviceUuids list of GATT services server offers
*/
public void startAdvertisingServices(Set<UUID> serviceUuids) {
Log.d(TAG, "Starting advertising");
AdvertiseSettings.Builder advertiseSettings = new AdvertiseSettings.Builder()
/* Balanced between advertising frequency and power consumption */
.setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_BALANCED)
.setTimeout(0)
/* Medium transmission (TX) power level */
.setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM);
AdvertiseData.Builder advertiseData = new AdvertiseData.Builder()
.setIncludeDeviceName(true)
.setIncludeTxPowerLevel(false);
for (UUID serviceUuid : serviceUuids) {
advertiseData.addServiceUuid(new ParcelUuid(serviceUuid));
}
bluetoothLeAdvertiser.startAdvertising(
advertiseSettings.build(),
advertiseData.build(),
advertiseCallback);
}
/**
* Stop advertising to clients about server existence.
*/
public void stopAdvertisingServices() {
Log.d(TAG, "Stopping advertising");
bluetoothLeAdvertiser.stopAdvertising(advertiseCallback);
}
}
| server/app/src/main/java/com/google/heartrate/wearos/app/bluetooth/server/BluetoothAdvertiser.java | package com.google.heartrate.wearos.app.bluetooth.server;
import android.bluetooth.le.AdvertiseCallback;
import android.bluetooth.le.AdvertiseData;
import android.bluetooth.le.AdvertiseSettings;
import android.bluetooth.le.BluetoothLeAdvertiser;
import android.content.Context;
import android.os.ParcelUuid;
import android.util.Log;
import com.google.heartrate.wearos.app.bluetooth.BluetoothUtils;
import com.google.heartrate.wearos.app.gatt.GattException;
import java.util.Set;
import java.util.UUID;
/**
* {@link BluetoothAdvertiser} is the wrapper over {@link BluetoothLeAdvertiser} class.
* <p>
* Advertising allows devices to broadcast information defining their intentions.
* Server uses advertising to allow clients to find it and, subsequently, to establish a connection with it.
* <p>
* A server can use {@link BluetoothAdvertiser} to advertise its existence and show it's complete or partial
* list of GATT services it offers.
*
*/
public class BluetoothAdvertiser {
private static final String TAG = BluetoothAdvertiser.class.getSimpleName();
/** {@link BluetoothAdvertiser} for advertising process. */
private final BluetoothLeAdvertiser bluetoothLeAdvertiser;
/** Advertising callback to receive async responses from {@link BluetoothAdvertiser} . */
final AdvertiseCallback advertiseCallback = new AdvertiseCallback() {
@Override
public void onStartSuccess(AdvertiseSettings settingsInEffect) {
Log.i(TAG, "BluetoothLE Advertise Started.");
}
@Override
public void onStartFailure(int errorCode) {
Log.e(TAG, String.format("BluetoothLE Advertise Failed: %d", + errorCode));
}
};
public BluetoothAdvertiser(Context context) throws GattException {
bluetoothLeAdvertiser = BluetoothUtils.getBluetoothLeAdvertiser(context);
}
/**
* Start advertising to clients about server existence.
*
* @param serviceUuids list of GATT services server offers
*/
public void startAdvertisingServices(Set<UUID> serviceUuids) {
Log.d(TAG, "Starting advertising");
AdvertiseSettings.Builder advertiseSettings = new AdvertiseSettings.Builder()
/* Balanced between advertising frequency and power consumption */
.setAdvertiseMode(AdvertiseSettings.ADVERTISE_MODE_BALANCED)
.setTimeout(0)
/* Medium transmission (TX) power level */
.setTxPowerLevel(AdvertiseSettings.ADVERTISE_TX_POWER_MEDIUM);
AdvertiseData.Builder advertiseData = new AdvertiseData.Builder()
.setIncludeDeviceName(true)
.setIncludeTxPowerLevel(false);
for (UUID serviceUuid : serviceUuids) {
advertiseData.addServiceUuid(new ParcelUuid(serviceUuid));
}
bluetoothLeAdvertiser.startAdvertising(
advertiseSettings.build(),
advertiseData.build(),
advertiseCallback);
}
/**
* Stop advertising to clients about server existence.
*/
public void stopAdvertisingServices() {
Log.d(TAG, "Stopping advertising");
bluetoothLeAdvertiser.stopAdvertising(advertiseCallback);
}
}
| Fix advertise callback field comment. Add link.
| server/app/src/main/java/com/google/heartrate/wearos/app/bluetooth/server/BluetoothAdvertiser.java | Fix advertise callback field comment. Add link. | <ide><path>erver/app/src/main/java/com/google/heartrate/wearos/app/bluetooth/server/BluetoothAdvertiser.java
<ide> /** {@link BluetoothAdvertiser} for advertising process. */
<ide> private final BluetoothLeAdvertiser bluetoothLeAdvertiser;
<ide>
<del> /** Advertising callback to receive async responses from {@link BluetoothAdvertiser} . */
<add> /** {@link AdvertiseCallback} to receive async responses from {@link BluetoothAdvertiser}. */
<ide> final AdvertiseCallback advertiseCallback = new AdvertiseCallback() {
<ide> @Override
<ide> public void onStartSuccess(AdvertiseSettings settingsInEffect) { |
|
Java | apache-2.0 | 6f65a99422a55bc0ed959cba924d2b9d720db402 | 0 | apache/lenya,apache/lenya,apache/lenya,apache/lenya | /*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lenya.cms.observation;
import org.apache.lenya.ac.impl.AbstractAccessControlTest;
import org.apache.lenya.cms.cocoon.source.SourceUtil;
import org.apache.lenya.cms.publication.Document;
import org.apache.lenya.cms.publication.DocumentFactory;
import org.apache.lenya.cms.publication.DocumentManager;
import org.apache.lenya.cms.publication.DocumentUtil;
import org.apache.lenya.cms.publication.Publication;
import org.apache.lenya.cms.publication.PublicationUtil;
import org.apache.lenya.cms.repository.RepositoryUtil;
import org.apache.lenya.cms.repository.Session;
import org.apache.lenya.xml.NamespaceHelper;
public class ObservationTest extends AbstractAccessControlTest {
public void testObservation() throws Exception {
login("lenya");
Session session = RepositoryUtil.getSession(getManager(), getRequest());
DocumentFactory factory = DocumentUtil.createDocumentIdentityMap(getManager(), session);
Publication publication = PublicationUtil.getPublication(getManager(), "test");
Document doc = factory.get(publication, Publication.AUTHORING_AREA, "/index", "en");
TestListener docListener = new TestListener();
TestListener allListener = new TestListener();
ObservationRegistry registry = null;
try {
registry = (ObservationRegistry) getManager().lookup(ObservationRegistry.ROLE);
// check if it works if only the allListener is registered
registry.registerListener(allListener);
testChanged(doc, allListener);
registry.registerListener(docListener, doc);
Exception e = null;
try {
registry.registerListener(docListener, doc);
}
catch (ObservationException e1) {
e = e1;
}
assertNotNull(e);
testChanged(doc, docListener);
testChanged(doc, allListener);
}
finally {
if (registry != null) {
getManager().release(registry);
}
}
}
protected void testChanged(Document doc, TestListener listener) throws Exception {
listener.reset();
NamespaceHelper xml = new NamespaceHelper("http://apache.org/lenya/test", "", "test");
SourceUtil.writeDOM(xml.getDocument(), doc.getSourceURI(), getManager());
String mimeType = doc.getMimeType();
doc.setMimeType("");
doc.setMimeType(mimeType);
assertFalse(listener.wasChanged());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasChanged());
}
protected void testRemoved(Document doc, TestListener listener) throws Exception {
listener.reset();
DocumentManager docManager = null;
try {
docManager = (DocumentManager) getManager().lookup(DocumentManager.ROLE);
Document target = doc.getFactory().get(doc.getPublication(), doc.getArea(), "/testTarget", doc.getLanguage());
docManager.move(doc, target.getLocator());
assertFalse(listener.wasRemoved());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasRemoved());
docManager.move(target, doc.getLocator());
assertFalse(listener.wasChanged());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasChanged());
}
finally {
if (docManager != null) {
getManager().release(docManager);
}
}
}
}
| src/modules-core/observation-impl/java/test/org/apache/lenya/cms/observation/ObservationTest.java | /*
* Copyright 1999-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lenya.cms.observation;
import org.apache.lenya.ac.impl.AbstractAccessControlTest;
import org.apache.lenya.cms.cocoon.source.SourceUtil;
import org.apache.lenya.cms.publication.Document;
import org.apache.lenya.cms.publication.DocumentFactory;
import org.apache.lenya.cms.publication.DocumentManager;
import org.apache.lenya.cms.publication.DocumentUtil;
import org.apache.lenya.cms.publication.Publication;
import org.apache.lenya.cms.publication.PublicationUtil;
import org.apache.lenya.cms.repository.RepositoryUtil;
import org.apache.lenya.cms.repository.Session;
import org.apache.lenya.xml.NamespaceHelper;
public class ObservationTest extends AbstractAccessControlTest {
public void testObservation() throws Exception {
login("lenya");
Session session = RepositoryUtil.getSession(getManager(), getRequest());
DocumentFactory factory = DocumentUtil.createDocumentIdentityMap(getManager(), session);
Publication publication = PublicationUtil.getPublication(getManager(), "test");
Document doc = factory.get(publication, Publication.AUTHORING_AREA, "/index", "en");
TestListener docListener = new TestListener();
TestListener allListener = new TestListener();
ObservationRegistry registry = null;
try {
registry = (ObservationRegistry) getManager().lookup(ObservationRegistry.ROLE);
// check if it works if only the allListener is registered
registry.registerListener(allListener);
testChanged(doc, allListener);
registry.registerListener(docListener, doc);
Exception e = null;
try {
registry.registerListener(docListener, doc);
}
catch (ObservationException e1) {
e = e1;
}
assertNotNull(e);
testChanged(doc, docListener);
testChanged(doc, allListener);
}
finally {
if (registry != null) {
getManager().release(registry);
}
}
}
protected void testChanged(Document doc, TestListener listener) throws Exception {
listener.reset();
NamespaceHelper xml = new NamespaceHelper("http://apache.org/lenya/test", "", "test");
doc.getRepositoryNode().lock();
SourceUtil.writeDOM(xml.getDocument(), doc.getSourceURI(), getManager());
String mimeType = doc.getMimeType();
doc.setMimeType("");
doc.setMimeType(mimeType);
assertFalse(listener.wasChanged());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasChanged());
}
protected void testRemoved(Document doc, TestListener listener) throws Exception {
listener.reset();
DocumentManager docManager = null;
try {
docManager = (DocumentManager) getManager().lookup(DocumentManager.ROLE);
Document target = doc.getFactory().get(doc.getPublication(), doc.getArea(), "/testTarget", doc.getLanguage());
docManager.move(doc, target.getLocator());
assertFalse(listener.wasRemoved());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasRemoved());
docManager.move(target, doc.getLocator());
assertFalse(listener.wasChanged());
doc.getRepositoryNode().getSession().commit();
Thread.currentThread().sleep(100);
assertTrue(listener.wasChanged());
}
finally {
if (docManager != null) {
getManager().release(docManager);
}
}
}
}
| Observation test: don't lock document before writing
git-svn-id: c334bb69c16d150e1b06e84516f7aa90b3181ca2@429339 13f79535-47bb-0310-9956-ffa450edef68
| src/modules-core/observation-impl/java/test/org/apache/lenya/cms/observation/ObservationTest.java | Observation test: don't lock document before writing | <ide><path>rc/modules-core/observation-impl/java/test/org/apache/lenya/cms/observation/ObservationTest.java
<ide> protected void testChanged(Document doc, TestListener listener) throws Exception {
<ide> listener.reset();
<ide> NamespaceHelper xml = new NamespaceHelper("http://apache.org/lenya/test", "", "test");
<del> doc.getRepositoryNode().lock();
<ide> SourceUtil.writeDOM(xml.getDocument(), doc.getSourceURI(), getManager());
<ide>
<ide> String mimeType = doc.getMimeType(); |
|
Java | apache-2.0 | 37d2f9852c27642542e992ba7e4dee95d4771513 | 0 | mythguided/hydra,mythguided/hydra,mythguided/hydra,mythguided/hydra | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.job;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.text.ParseException;
import com.addthis.basis.net.HttpUtil;
import com.addthis.basis.util.Bytes;
import com.addthis.basis.util.Files;
import com.addthis.basis.util.JitterClock;
import com.addthis.basis.util.Parameter;
import com.addthis.basis.util.Strings;
import com.addthis.basis.util.TokenReplacerOverflowException;
import com.addthis.bark.ZkClientFactory;
import com.addthis.bark.ZkHelpers;
import com.addthis.codec.Codec;
import com.addthis.codec.CodecJSON;
import com.addthis.hydra.job.backup.ScheduledBackupType;
import com.addthis.hydra.job.mq.CommandTaskDelete;
import com.addthis.hydra.job.mq.CommandTaskKick;
import com.addthis.hydra.job.mq.CommandTaskReplicate;
import com.addthis.hydra.job.mq.CommandTaskRevert;
import com.addthis.hydra.job.mq.CommandTaskStop;
import com.addthis.hydra.job.mq.CoreMessage;
import com.addthis.hydra.job.mq.HostCapacity;
import com.addthis.hydra.job.mq.HostMessage;
import com.addthis.hydra.job.mq.HostState;
import com.addthis.hydra.job.mq.JobKey;
import com.addthis.hydra.job.mq.ReplicaTarget;
import com.addthis.hydra.job.mq.StatusTaskBackup;
import com.addthis.hydra.job.mq.StatusTaskBegin;
import com.addthis.hydra.job.mq.StatusTaskCantBegin;
import com.addthis.hydra.job.mq.StatusTaskEnd;
import com.addthis.hydra.job.mq.StatusTaskPort;
import com.addthis.hydra.job.mq.StatusTaskReplica;
import com.addthis.hydra.job.mq.StatusTaskReplicate;
import com.addthis.hydra.job.mq.StatusTaskRevert;
import com.addthis.hydra.job.spawn.JobAlert;
import com.addthis.hydra.job.spawn.JobAlertRunner;
import com.addthis.hydra.job.spawn.SpawnService;
import com.addthis.hydra.job.store.DataStoreUtil;
import com.addthis.hydra.job.store.JobStore;
import com.addthis.hydra.job.store.SpawnDataStore;
import com.addthis.hydra.query.AliasBiMap;
import com.addthis.hydra.query.WebSocketManager;
import com.addthis.hydra.task.run.TaskExitState;
import com.addthis.hydra.util.DirectedGraph;
import com.addthis.hydra.util.SettableGauge;
import com.addthis.maljson.JSONArray;
import com.addthis.maljson.JSONException;
import com.addthis.maljson.JSONObject;
import com.addthis.meshy.MeshyClient;
import com.addthis.meshy.service.file.FileReference;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.Gauge;
import com.yammer.metrics.core.Meter;
import org.I0Itec.zkclient.ZkClient;
import org.codehaus.jackson.map.ObjectMapper;
import org.eclipse.jetty.server.Server;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.MINION_DEAD_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.MINION_UP_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_BALANCE_PARAM_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_ALERT_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_COMMAND_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_MACRO_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_QUEUE_PATH;
import jsr166e.ConcurrentHashMapV8;
/**
* manages minions running on remote notes. runs master http server to
* communicate with and control those instances.
*/
public class Spawn implements Codec.Codable {
private static Logger log = LoggerFactory.getLogger(Spawn.class);
private static String httpHost = Parameter.value("spawn.localhost");
private static String clusterName = Parameter.value("cluster.name", "localhost");
private static String queryHttpHost = Parameter.value("spawn.queryhost");
private static int webPort = Parameter.intValue("spawn.http.port", 5050);
private static int requestHeaderBufferSize = Parameter.intValue("spawn.http.bufsize", 8192);
private static int hostStatusRequestInterval = Parameter.intValue("spawn.status.interval", 10000);
private static int queueKickInterval = Parameter.intValue("spawn.queue.kick.interval", 6000);
private static String debugOverride = Parameter.value("spawn.debug");
private static final boolean useStructuredLogger = Parameter.boolValue("spawn.logger.bundle.enable",
clusterName.equals("localhost")); // default to true if-and-only-if we are running local stack
private static final Codec codec = new CodecJSON();
private static final Counter quiesceCount = Metrics.newCounter(Spawn.class, "quiesced");
private static final SettableGauge<Integer> runningTaskCount = SettableGauge.newSettableGauge(Spawn.class, "runningTasks", 0);
private static final SettableGauge<Integer> queuedTaskCount = SettableGauge.newSettableGauge(Spawn.class, "queuedTasks", 0);
private static final SettableGauge<Integer> failTaskCount = SettableGauge.newSettableGauge(Spawn.class, "failedTasks", 0);
private static final Meter tasksStartedPerHour = Metrics.newMeter(Spawn.class, "tasksStartedPerHour", "tasksStartedPerHour", TimeUnit.HOURS);
private static final Meter tasksCompletedPerHour = Metrics.newMeter(Spawn.class, "tasksCompletedPerHour", "tasksCompletedPerHour", TimeUnit.HOURS);
private static final SettableGauge<Integer> runningJobCount = SettableGauge.newSettableGauge(Spawn.class, "runningJobs", 0);
private static final SettableGauge<Integer> queuedJobCount = SettableGauge.newSettableGauge(Spawn.class, "queuedJobs", 0);
private static final SettableGauge<Integer> failJobCount = SettableGauge.newSettableGauge(Spawn.class, "failedJobs", 0);
private static final SettableGauge<Integer> hungJobCount = SettableGauge.newSettableGauge(Spawn.class, "hungJobs", 0);
private static final Meter jobsStartedPerHour = Metrics.newMeter(Spawn.class, "jobsStartedPerHour", "jobsStartedPerHour", TimeUnit.HOURS);
private static final Meter jobsCompletedPerHour = Metrics.newMeter(Spawn.class, "jobsCompletedPerHour", "jobsCompletedPerHour", TimeUnit.HOURS);
public static final String SPAWN_DATA_DIR = Parameter.value("SPAWN_DATA_DIR", "./data");
public static final String SPAWN_STRUCTURED_LOG_DIR = Parameter.value("spawn.logger.bundle.dir", "./log/spawn-stats");
// thread pool for running chore actions that we do not want running in the main thread of Spawn
private final ExecutorService choreExecutor = MoreExecutors.getExitingExecutorService(
new ThreadPoolExecutor(1, 4, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(), new ThreadFactoryBuilder().setNameFormat("choreExecutor-%d").build()));
// thread pool for expanding jobs and sending kick messages (outside of the main application threads)
// - thread pool size of 10 chosen somewhat arbitrarily, most job expansions should be nearly instantaneous
// - max queue size of 5000 was chosen as a generous upper bound for how many tasks may be queued at once (since the number of scheduled kicks is limited by queue size)
private final LinkedBlockingQueue<Runnable> expandKickQueue = new LinkedBlockingQueue<>(5000);
private final ExecutorService expandKickExecutor = MoreExecutors.getExitingExecutorService(
new ThreadPoolExecutor(10, 10, 0L, TimeUnit.MILLISECONDS, expandKickQueue,
new ThreadFactoryBuilder().setNameFormat("jobExpander-%d").build()));
private final ScheduledExecutorService scheduledExecutor = MoreExecutors.getExitingScheduledExecutorService(
new ScheduledThreadPoolExecutor(2, new ThreadFactoryBuilder().setNameFormat("spawnScheduledTask-%d").build()));
private final Gauge<Integer> expandQueueGauge = Metrics.newGauge(Spawn.class, "expandKickExecutorQueue", new Gauge<Integer>() {
public Integer value() {
return expandKickQueue.size();
}
});
private final HostFailWorker hostFailWorker;
public static void main(String args[]) throws Exception {
Spawn spawn = new Spawn(
new File(args.length > 0 ? args[0] : "etc"),
new File(args.length > 1 ? args[1] : "web")
);
new SpawnService(spawn).start();
}
private final File dataDir;
private final ConcurrentHashMap<String, ClientEventListener> listeners;
@Codec.Set(codable = true)
private String uuid;
@Codec.Set(codable = true)
private String debug;
@Codec.Set(codable = true)
private String queryHost;
@Codec.Set(codable = true)
private String spawnHost;
@Codec.Set(codable = true)
private int queryPort = 2222;
@Codec.Set(codable = true)
private boolean quiesce;
@Codec.Set(codable = true)
private final HashSet<String> disabledHosts = new HashSet<>();
private int choreCleanerInterval = Parameter.intValue("spawn.chore.interval", 10000);
private static final int CHORE_TTL = Parameter.intValue("spawn.chore.ttl", 60 * 60 * 24 * 1000);
private static final int TASK_QUEUE_DRAIN_INTERVAL = Parameter.intValue("task.queue.drain.interval", 500);
private static final boolean ENABLE_JOB_STORE = Parameter.boolValue("job.store.enable", true);
private static final boolean ENABLE_JOB_FIXDIRS_ONCOMPLETE = Parameter.boolValue("job.fixdirs.oncomplete", true);
private final ConcurrentHashMap<String, HostState> monitored;
private final SpawnState spawnState = new SpawnState();
private final SpawnMesh spawnMesh;
private final SpawnFormattedLogger spawnFormattedLogger;
private ZkClient zkClient;
private SpawnMQ spawnMQ;
private Server jetty;
private JobConfigManager jobConfigManager;
private SetMembershipListener minionMembers;
private SetMembershipListener deadMinionMembers;
private AliasBiMap aliasBiMap;
private boolean useZk = true;
private final String stateFilePath = Parameter.value("spawn.state.file", "spawn.state");
private Gauge<Integer> minionsDown = Metrics.newGauge(Spawn.class, "minionsDown", new Gauge<Integer>() {
public Integer value() {
int total = 0;
if (monitored != null) {
synchronized (monitored) {
total = monitored.size();
}
}
int up = minionMembers == null ? 0 : minionMembers.getMemberSetSize();
int dead = deadMinionMembers == null ? 0 : deadMinionMembers.getMemberSetSize();
return total - up - dead;
}
});
private SpawnBalancer balancer;
private SpawnQueuesByPriority taskQueuesByPriority = new SpawnQueuesByPriority();
private volatile int lastQueueSize = 0;
private final Lock jobLock = new ReentrantLock();
private final AtomicBoolean shuttingDown = new AtomicBoolean(false);
private final LinkedBlockingQueue<String> jobUpdateQueue = new LinkedBlockingQueue<>();
private final SpawnJobFixer spawnJobFixer = new SpawnJobFixer(this);
private JobAlertRunner jobAlertRunner;
private JobStore jobStore;
private SpawnDataStore spawnDataStore;
//To track web socket connections
private final WebSocketManager webSocketManager = new WebSocketManager();
/**
* default constructor used for testing purposes only
*/
@VisibleForTesting
public Spawn() throws Exception {
this(false);
}
@VisibleForTesting
public Spawn(boolean zk) throws Exception {
this.dataDir = Files.initDirectory(SPAWN_DATA_DIR);
this.listeners = new ConcurrentHashMap<>();
this.monitored = new ConcurrentHashMap<>();
this.useZk = zk;
this.spawnFormattedLogger = useStructuredLogger ?
SpawnFormattedLogger.createFileBasedLogger(new File(SPAWN_STRUCTURED_LOG_DIR)) :
SpawnFormattedLogger.createNullLogger();
if (zk) {
log.info("[init] starting zkclient, config manager, and listening for minions");
this.zkClient = ZkClientFactory.makeStandardClient();
this.spawnDataStore = DataStoreUtil.makeSpawnDataStore(zkClient);
this.jobConfigManager = new JobConfigManager(this.spawnDataStore);
this.minionMembers = new SetMembershipListener(MINION_UP_PATH, true);
this.deadMinionMembers = new SetMembershipListener(MINION_DEAD_PATH, false);
}
this.hostFailWorker = new HostFailWorker(this);
this.balancer = new SpawnBalancer(this);
this.spawnMesh = new SpawnMesh(this);
}
private Spawn(File dataDir, File webDir) throws Exception {
getSettings().setQuiesced(quiesce);
this.dataDir = Files.initDirectory(dataDir);
this.monitored = new ConcurrentHashMap<>();
this.listeners = new ConcurrentHashMap<>();
this.spawnFormattedLogger = useStructuredLogger ?
SpawnFormattedLogger.createFileBasedLogger(new File(SPAWN_STRUCTURED_LOG_DIR)) :
SpawnFormattedLogger.createNullLogger();
this.zkClient = ZkClientFactory.makeStandardClient();
this.spawnDataStore = DataStoreUtil.makeSpawnDataStore(zkClient);
File statefile = new File(dataDir, stateFilePath);
if (statefile.exists() && statefile.isFile()) {
codec.decode(this, Files.read(statefile));
}
this.queryHost = (queryHttpHost != null ? queryHttpHost : InetAddress.getLocalHost().getHostAddress()) + ":" + queryPort;
this.spawnHost = (httpHost != null ? httpHost : InetAddress.getLocalHost().getHostAddress()) + ":" + webPort;
if (uuid == null) {
uuid = UUID.randomUUID().toString();
log.warn("[init] uuid was null, creating new one: " + uuid);
}
if (debugOverride != null) {
debug = debugOverride;
}
// look for local object to import
log.info("[init] beginning to load stats from data store");
loadMacros();
loadCommands();
loadSpawnQueue();
this.jobConfigManager = new JobConfigManager(spawnDataStore);
// fix up null pointers
for (Job job : spawnState.jobs.values()) {
if (job.getSubmitTime() == null) {
job.setSubmitTime(System.currentTimeMillis());
}
}
loadJobs();
// register jvm shutdown hook to clean up resources
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
runtimeShutdownHook();
}
});
// connect to message broker or fail
log.info("[init] connecting to message queue");
this.spawnMQ = new SpawnMQImpl(zkClient, this);
this.minionMembers = new SetMembershipListener(MINION_UP_PATH, true);
this.deadMinionMembers = new SetMembershipListener(MINION_DEAD_PATH, false);
this.aliasBiMap = new AliasBiMap(spawnDataStore);
aliasBiMap.loadCurrentValues();
hostFailWorker = new HostFailWorker(this);
balancer = new SpawnBalancer(this);
loadSpawnBalancerConfig();
this.spawnMQ.connectToMQ(uuid);
// request hosts to send their status
Timer timer = new Timer(true);
timer.schedule(new TimerTask() {
@Override
public void run() {
requestHostsUpdate();
}
}, hostStatusRequestInterval, hostStatusRequestInterval);
Timer taskQueueTimer = new Timer(true);
taskQueueTimer.schedule(new TimerTask() {
@Override
public void run() {
kickJobsOnQueue();
writeSpawnQueue();
}
}, queueKickInterval, queueKickInterval);
Timer taskUpdateQueueDrainer = new Timer(true);
taskUpdateQueueDrainer.schedule(new TimerTask() {
@Override
public void run() {
drainJobTaskUpdateQueue();
}
}, TASK_QUEUE_DRAIN_INTERVAL, TASK_QUEUE_DRAIN_INTERVAL);
//Start JobAlertManager
this.jobAlertRunner = new JobAlertRunner(this);
// start job scheduler
scheduledExecutor.scheduleWithFixedDelay(new UpdateEventRunnable(), 0, 1, TimeUnit.MINUTES);
scheduledExecutor.scheduleWithFixedDelay(new JobRekickTask(), 0, 500, TimeUnit.MILLISECONDS);
// start http commands listener(s)
startSpawnWeb(dataDir, webDir);
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
try {
jetty.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
});
// connect to mesh
this.spawnMesh = new SpawnMesh(this);
balancer.startAutobalanceTask();
balancer.startTaskSizePolling();
if (ENABLE_JOB_STORE) {
jobStore = new JobStore(new File(dataDir, "jobstore"));
}
}
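/**
 * Serialize this spawn's codable state and write it to the state file under the data directory.
 */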
private void writeState() {
try {
File statefile = new File(dataDir, stateFilePath);
Files.write(statefile, codec.encode(this), false);
} catch (Exception e) {
log.warn("WARNING: failed to write spawn state to log file at " + stateFilePath);
}
}
public void markHostsForFailure(String hostId, boolean fileSystemDead) {
hostFailWorker.markHostsToFail(hostId, fileSystemDead);
}
public void unmarkHostsForFailure(String hostIds) {
hostFailWorker.removeHostsForFailure(hostIds);
}
public HostFailWorker getHostFailWorker() {
return hostFailWorker;
}
public SpawnBalancer getSpawnBalancer() {
return balancer;
}
public static String getHttpHost() {
return httpHost;
}
public void acquireJobLock() {
jobLock.lock();
}
public void releaseJobLock() {
jobLock.unlock();
}
private void startSpawnWeb(File dataDir, File webDir) throws Exception {
log.info("[init] starting http server");
SpawnHttp http = new SpawnHttp(this, webDir);
new SpawnManager().register(http);
jetty = new Server(webPort);
jetty.getConnectors()[0].setRequestBufferSize(65535);
jetty.getConnectors()[0].setRequestHeaderSize(requestHeaderBufferSize);
jetty.setHandler(http);
jetty.start();
}
public String getUuid() {
return uuid;
}
public MeshyClient getMeshyClient() {
return spawnMesh.getClient();
}
public ZkClient getZkClient() {
return zkClient;
}
private void closeZkClients() {
if (spawnDataStore != null) {
spawnDataStore.close();
}
if (zkClient != null) {
zkClient.close();
}
}
public void setSpawnMQ(SpawnMQ spawnMQ) {
this.spawnMQ = spawnMQ;
}
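/**
 * Load all job macros from the spawn data store and register them in memory.
 */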
private void loadMacros() throws Exception {
Map<String, String> loadedMacros = spawnDataStore.getAllChildren(SPAWN_COMMON_MACRO_PATH);
if (loadedMacros == null) {
return;
}
for (Entry<String, String> macroEntry : loadedMacros.entrySet()) {
String jsonMacro = macroEntry.getValue();
if (jsonMacro != null && !jsonMacro.equals("null") && !jsonMacro.isEmpty()) {
JobMacro macro = new JobMacro();
codec.decode(macro, jsonMacro.getBytes());
putMacro(macroEntry.getKey(), macro, false);
}
}
}
// TODO: It should be possible to reduce duplication between how commands and macros are handled.
@VisibleForTesting
protected void loadCommands() throws Exception {
Map<String, String> loadedCommands = spawnDataStore.getAllChildren(SPAWN_COMMON_COMMAND_PATH);
if (loadedCommands == null) {
return;
}
for (Entry<String, String> commandEntry : loadedCommands.entrySet()) {
String jsonCommand = commandEntry.getValue();
if (jsonCommand != null && !jsonCommand.equals("null") && !jsonCommand.isEmpty()) {
JobCommand command = new JobCommand();
codec.decode(command, jsonCommand.getBytes());
putCommand(commandEntry.getKey(), command, false);
}
}
}
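/**
 * Restore the previously persisted task queue from the spawn data store, if one exists.
 */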
@VisibleForTesting
protected void loadSpawnQueue() throws Exception {
String queueFromZk = spawnDataStore.get(SPAWN_QUEUE_PATH);
if (queueFromZk == null) {
return;
}
try {
taskQueuesByPriority = new ObjectMapper().readValue(queueFromZk, SpawnQueuesByPriority.class);
} catch (Exception ex) {
log.warn("[task.queue] exception during spawn queue deserialization: ", ex);
}
}
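/**
 * Serialize the current task queue (under its lock) and persist it to the spawn data store.
 */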
protected void writeSpawnQueue() {
ObjectMapper om = new ObjectMapper();
try {
taskQueuesByPriority.lock();
try {
spawnDataStore.put(SPAWN_QUEUE_PATH, new String(om.writeValueAsBytes(taskQueuesByPriority)));
} finally {
taskQueuesByPriority.unlock();
}
} catch (Exception ex) {
log.warn("[task.queue] exception during spawn queue serialization: " + ex, ex);
}
}
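/**
 * Load all job definitions from the config manager into spawn state, then rebuild job dependencies
 * on a background daemon thread.
 */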
@VisibleForTesting
protected void loadJobs() {
if (jobConfigManager != null) {
jobLock.lock();
try {
for (IJob iJob : jobConfigManager.getJobs().values()) {
if (iJob != null) {
putJobInSpawnState(new Job(iJob));
}
}
} finally {
jobLock.unlock();
}
}
Thread loadDependencies = new Thread() {
@Override
public void run() {
Set<String> jobIds = spawnState.jobs.keySet();
for (String jobId : jobIds) {
IJob job = getJob(jobId);
if (job != null) {
updateJobDependencies(job);
}
}
}
};
loadDependencies.setDaemon(true);
loadDependencies.start();
}
// -------------------- BEGIN API ---------------------
public Settings getSettings() {
return new Settings();
}
public Map<String, List<String>> getAliases() {
return aliasBiMap.viewAliasMap();
}
public void addAlias(String alias, List<String> jobs) {
if (jobs.size() > 0) {
aliasBiMap.putAlias(alias, jobs);
} else {
log.warn("Ignoring empty jobs addition for alias: " + alias);
}
}
public void deleteAlias(String alias) {
aliasBiMap.deleteAlias(alias);
}
public ClientEventListener getClientEventListener(String id) {
ClientEventListener listener = listeners.get(id);
if (listener == null) {
listener = new ClientEventListener();
listeners.put(id, listener);
}
listener.lastSeen = System.currentTimeMillis();
return listener;
}
public HostState getHostState(String hostUuid) {
synchronized (monitored) {
return monitored.get(hostUuid);
}
}
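/**
 * Mark a host as dead, update its ZooKeeper registration, and broadcast the updated state.
 */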
public HostState markHostStateDead(String hostUUID) {
HostState state = getHostState(hostUUID);
if (state != null) {
state.setDead(true);
state.setUpdated();
if (useZk) {
// delete minion state
ZkHelpers.deletePath(zkClient, Minion.MINION_ZK_PATH + hostUUID);
ZkHelpers.makeSurePersistentPathExists(zkClient, MINION_DEAD_PATH + "/" + hostUUID);
}
sendHostUpdateEvent(state);
updateHostState(state);
}
return state;
}
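/**
 * Record the latest state reported by a minion, unless that minion is registered as dead.
 */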
protected void updateHostState(HostState state) {
synchronized (monitored) {
if (deadMinionMembers == null || !deadMinionMembers.getMemberSet().contains(state.getHostUuid())) {
if (log.isDebugEnabled()) {
log.debug("Updating host state for : " + state.getHost());
}
monitored.put(state.getHostUuid(), state);
}
}
}
/**
* List all hosts belonging to a particular minion type.
*
* @param minionType The minion type to find. If null, return all hosts.
* @return A list of host states
*/
public List<HostState> listHostStatus(String minionType) {
synchronized (monitored) {
Set<String> availableMinions = minionMembers == null ? ImmutableSet.<String>of() : minionMembers.getMemberSet();
Set<String> deadMinions = deadMinionMembers == null ? ImmutableSet.<String>of() : deadMinionMembers.getMemberSet();
ArrayList<HostState> allMinions = new ArrayList<>();
for (HostState minion : monitored.values()) {
if (availableMinions.contains(minion.getHostUuid()) && !deadMinions.contains(minion.getHostUuid())) {
minion.setUp(true);
} else {
minion.setUp(false);
}
if (minionType == null || minion.hasType(minionType)) {
allMinions.add(minion);
}
}
return allMinions;
}
}
public Set<String> listMinionTypes() {
Set<String> rv = new HashSet<>();
synchronized (monitored) {
for (HostState minion : monitored.values()) {
rv.add(minion.getMinionTypes());
}
}
return rv;
}
public Collection<String> listAvailableHostIds() {
return minionMembers.getMemberSet();
}
public void requestHostsUpdate() {
try {
spawnMQ.sendControlMessage(new HostState(HostMessage.ALL_HOSTS));
} catch (Exception e) {
log.warn("unable to request host state update: " + e);
}
}
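/**
 * Return the IDs of jobs referenced by this job's parameters, i.e. its upstream data sources.
 */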
public Set<String> getDataSources(String jobId) {
HashSet<String> dataSources = new HashSet<>();
Job job = this.getJob(jobId);
if (job == null || job.getParameters() == null) {
return dataSources;
}
jobLock.lock();
try {
for (JobParameter param : job.getParameters()) {
String value = param.getValue();
if (Strings.isEmpty(value)) {
value = param.getDefaultValue();
}
if (value != null && spawnState.jobs.containsKey(value)) {
dataSources.add(value);
}
}
} finally {
jobLock.unlock();
}
return dataSources;
}
public DirectedGraph<String> getJobDependencies() {
return spawnState.jobDependencies;
}
/**
 * Returns the jobs that depend on a given job. A dependency is established if the job's ID is used as a job parameter.
 */
public Collection<Job> listDependentJobs(String jobId) {
ArrayList<Job> dependents = new ArrayList<>();
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
for (JobParameter param : job.getParameters()) {
if (param.getValue() != null && param.getValue().equals(jobId)) {
dependents.add(job);
break;
}
}
}
return dependents;
} finally {
jobLock.unlock();
}
}
public void buildDependencyFlowGraph(FlowGraph graph, String jobId) {
graph.addFlow(jobId);
Collection<Job> jobDeps = this.listDependentJobs(jobId);
for (Job jobDep : jobDeps) {
graph.addFlow(jobId, jobDep.getId());
buildDependencyFlowGraph(graph, jobDep.getId());
}
}
/**
 * Gets the backup times of all backup types for a given job and node by using MeshyClient. If the nodeId is -1 it
 * will get the backup times for all nodes.
 *
 * @return Set of date times in reverse chronological order, mapped by backup type
 * @throws IOException thrown if the mesh client times out
 * @throws ParseException thrown if a filename does not meet the valid format
*/
public Map<ScheduledBackupType, SortedSet<Long>> getJobBackups(String jobUUID, int nodeId) throws IOException, ParseException {
Map<ScheduledBackupType, SortedSet<Long>> fileDates = new HashMap<ScheduledBackupType, SortedSet<Long>>();
for (ScheduledBackupType backupType : ScheduledBackupType.getBackupTypes().values()) { //ignore types with symlink (like gold)
//if(backupType.getSymlinkName()==null)
//{
final String typePrefix = "*/" + jobUUID + "/" + ((nodeId < 0) ? "*" : Integer.toString(nodeId)) + "/" + backupType.getPrefix() + "*";
List<FileReference> files = new ArrayList<FileReference>(spawnMesh.getClient().listFiles(new String[]{typePrefix}));
fileDates.put(backupType, new TreeSet<Long>(Collections.reverseOrder()));
for (FileReference file : files) {
String filename = file.name.split("/")[4];
fileDates.get(backupType).add(backupType.parseDateFromName(filename).getTime());
}
//}
}
return fileDates;
}
public boolean isSpawnMeshAvailable() {
return spawnMesh.getClient() != null;
}
public void deleteHost(String hostuuid) {
synchronized (monitored) {
HostState state = monitored.remove(hostuuid);
if (state != null) {
log.info("Deleted host " + hostuuid);
sendHostUpdateEvent("host.delete", state);
} else {
log.warn("Attempted to delete host " + hostuuid + "But it was not found");
}
}
}
public Collection<Job> listJobs() {
ArrayList<Job> clones = new ArrayList<>(spawnState.jobs.size());
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
clones.add(job);
}
return clones;
} finally {
jobLock.unlock();
}
}
public Collection<Job> listJobsConcurrentImmutable() {
return Collections.unmodifiableCollection(spawnState.jobs.values());
}
public JSONArray getTaskQueueAsJSONArray() {
taskQueuesByPriority.lock();
try {
JSONArray jsonArray = new JSONArray();
for (Integer priority : taskQueuesByPriority.keySet()) {
Map<String, Object> jobToTaskMap = new HashMap<>();
LinkedList<SpawnQueueItem> jobQueue = taskQueuesByPriority.get(priority);
for (JobKey jobkey : jobQueue) {
JobTask jobtask = getTask(jobkey.getJobUuid(), jobkey.getNodeNumber());
String hostStr = "";
hostStr += jobtask.getHostUUID() + " ";
for (JobTaskReplica jobTaskReplica : jobtask.getReplicas()) {
hostStr += jobTaskReplica.getHostUUID() + " ";
}
HashMap<String, Object> taskHostMap = (HashMap<String, Object>) jobToTaskMap.get(jobkey.getJobUuid());
if (taskHostMap == null) {
taskHostMap = new HashMap<>();
}
taskHostMap.put(Integer.toString(jobtask.getTaskID()), hostStr);
jobToTaskMap.put(jobkey.getJobUuid(), taskHostMap);
}
JSONObject jobResult = new JSONObject(jobToTaskMap);
jsonArray.put(jobResult);
}
return jsonArray;
} finally {
taskQueuesByPriority.unlock();
}
}
public int getTaskQueuedCount() {
return lastQueueSize;
}
public Job getJob(String jobUUID) {
if (jobUUID == null) {
return null;
}
jobLock.lock();
try {
return spawnState.jobs.get(jobUUID);
} finally {
jobLock.unlock();
}
}
public void setJobConfig(String jobUUID, String config) throws Exception {
jobConfigManager.setConfig(jobUUID, config);
}
public String getJobConfig(String jobUUID) {
if (jobUUID == null) {
return null;
}
jobLock.lock();
try {
return jobConfigManager.getConfig(jobUUID);
} finally {
jobLock.unlock();
}
}
public Job putJobInSpawnState(Job job) {
if (job == null) {
return null;
}
// Null out the job config before inserting to reduce the amount stored in memory.
// Calling getJob will fill it back in -- or call jobConfigManager.getConfig(id)
job.setConfig(null);
return spawnState.jobs.put(job.getId(), job);
}
public Job getJob(JobKey jobKey) {
String jobUUID = jobKey.getJobUuid();
return getJob(jobUUID);
}
public JSONArray getJobHistory(String jobId) {
return jobStore != null ? jobStore.getHistory(jobId) : new JSONArray();
}
public String getJobHistoricalConfig(String jobId, String commitId) {
return jobStore != null ? jobStore.fetchHistoricalConfig(jobId, commitId) : null;
}
public String diff(String jobId, String commitId) {
return jobStore != null ? jobStore.getDiff(jobId, commitId) : null;
}
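/**
 * Create a new job with default backup and replica settings, assign its tasks to suitable hosts via
 * the balancer, persist it through the config manager, and record an initial config update.
 */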
public Job createJob(String creator, int taskCount, Collection<String> taskHosts, String minionType, String command) throws Exception {
jobLock.lock();
try {
Job job = new Job(UUID.randomUUID().toString(), creator != null ? creator : "anonymous");
job.setOwner(job.getCreator());
job.setState(JobState.IDLE);
job.setCommand(command);
job.setDailyBackups(1);
job.setWeeklyBackups(1);
job.setMonthlyBackups(1);
job.setHourlyBackups(1);
job.setReplicas(1);
job.setMinionType(minionType);
List<HostState> hostStates = getOrCreateHostStateList(minionType, taskHosts);
List<JobTask> tasksAssignedToHosts = balancer.generateAssignedTasksForNewJob(job.getId(), taskCount, hostStates);
job.setTasks(tasksAssignedToHosts);
for (JobTask task : tasksAssignedToHosts) {
HostState host = getHostState(task.getHostUUID());
if (host == null) {
throw new Exception("Unable to allocate job tasks because no suitable host was found");
}
host.addJob(job.getId());
}
putJobInSpawnState(job);
if (jobConfigManager != null) {
jobConfigManager.addJob(job);
}
submitConfigUpdate(job.getId(), null);
return job;
} finally {
jobLock.unlock();
}
}
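/**
 * Compare spawn's view of a job's task placement against the state minions report in ZooKeeper.
 * If a live copy is missing but the reporting host holds a replica, replica rebalancing is triggered.
 * Pass "ALL" to check every job.
 */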
public boolean synchronizeJobState(String jobUUID) {
if (jobUUID == null) {
throw new NullPointerException("missing job uuid");
}
if (jobUUID.equals("ALL")) {
Collection<Job> jobList = listJobs();
for (Job job : jobList) {
if (!synchronizeSingleJob(job.getId())) {
log.warn("Stopping synchronize all jobs to to failure synchronizing job: " + job.getId());
return false;
}
}
return true;
} else {
return synchronizeSingleJob(jobUUID);
}
}
private boolean synchronizeSingleJob(String jobUUID) {
Job job = getJob(jobUUID);
if (job == null) {
log.warn("[job.synchronize] job uuid " + jobUUID + " not found");
return false;
}
ObjectMapper mapper = new ObjectMapper();
for (JobTask task : job.getCopyOfTasks()) {
String taskHost = task.getHostUUID();
if (deadMinionMembers.getMemberSet().contains(taskHost)) {
log.warn("task is currently assigned to a dead minion, need to check job: " + job.getId() + " host/node:" + task.getHostUUID() + "/" + task.getTaskID());
continue;
}
String hostStateString = ZkHelpers.readData(zkClient, Minion.MINION_ZK_PATH + taskHost);
HostState hostState;
try {
hostState = mapper.readValue(hostStateString, HostState.class);
} catch (IOException e) {
log.warn("Unable to deserialize host state for host: " + hostStateString + " serialized string was\n" + hostStateString);
return false;
}
boolean matched = matchJobNodeAndId(jobUUID, task, hostState.getRunning(), hostState.getStopped(), hostState.getQueued());
if (!matched) {
log.warn("Spawn thinks job: " + jobUUID + " node:" + task.getTaskID() + " is running on host: " + hostState.getHost() + " but that host disagrees.");
if (matchJobNodeAndId(jobUUID, task, hostState.getReplicas())) {
log.warn("Host: " + hostState.getHost() + " has a replica for the task/node: " + jobUUID + "/" + task.getTaskID() + " promoting replica");
try {
rebalanceReplicas(job);
} catch (Exception e) {
log.warn("Exception promoting replica during job synchronization on host: " + taskHost + " job/node" + job.getId() + "/" + job.getId());
}
} else {
log.warn("Host: " + hostState.getHost() + " does NOT have a replica for the task/node: " + jobUUID + "/" + task.getTaskID());
}
} else {
log.warn("Spawn and minion agree, job/node: " + jobUUID + "/" + task.getTaskID() + " is on host: " + hostState.getHost());
}
}
return true;
}
private boolean matchJobNodeAndId(String jobUUID, JobTask task, JobKey[]... jobKeys) {
for (JobKey[] jobKeyArray : jobKeys) {
for (JobKey jobKey : jobKeyArray) {
if (jobKey == null) {
log.warn("runningJob was null, this shouldn't happen");
continue;
} else if (jobKey.getJobUuid() == null) {
log.warn("JobUUID for jobKey: " + jobKey + " was null");
continue;
} else if (jobKey.getNodeNumber() == null) {
log.warn("NodeNumber for jobKey: " + jobKey + " was null");
continue;
}
if (jobKey.getJobUuid().equals(jobUUID) && jobKey.getNodeNumber().equals(task.getTaskID())) {
return true;
}
}
}
return false;
}
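/**
 * Return hosts of the given minion type that are up, not dead, and whose read-only flag matches the argument.
 */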
public List<HostState> getLiveHostsByReadOnlyStatus(String minionType, boolean readonly) {
List<HostState> allHosts = listHostStatus(minionType);
List<HostState> rv = new ArrayList<>(allHosts.size());
for (HostState host : allHosts) {
if (host.isUp() && !host.isDead() && host.isReadOnly() == readonly) {
rv.add(host);
}
}
return rv;
}
/**
* Reallocate some of a job's tasks to different hosts, hopefully improving its performance.
*
* @param jobUUID The ID of the job
* @param tasksToMove The number of tasks to move. If <= 0, use the default.
 * @param readonly Whether to reallocate among read-only hosts (true) or read/write hosts (false)
 * @param autobalance Whether the reallocation was triggered by autobalance logic, in which case smaller limits are used.
* @return a list of move assignments that were attempted
*/
public List<JobTaskMoveAssignment> reallocateJob(String jobUUID, int tasksToMove, boolean readonly, boolean autobalance) {
Job job;
if (jobUUID == null || (job = getJob(jobUUID)) == null) {
throw new NullPointerException("invalid job uuid");
}
if (job.getState() != JobState.IDLE) {
log.warn("[job.reallocate] can't reallocate non-idle job");
return null;
}
List<JobTaskMoveAssignment> assignments = balancer.getAssignmentsForJobReallocation(job, tasksToMove, getLiveHostsByReadOnlyStatus(job.getMinionType(), readonly));
executeReallocationAssignments(assignments, false);
return assignments;
}
/**
* Promote a task to live on one of its replica hosts, demoting the existing live to a replica.
*
* @param jobUUID job ID
* @param node task #
* @param replicaHostID The host holding the replica that should be promoted
* @param kickOnComplete Whether to kick the task after the move is complete
* @param ignoreQuiesce Whether the kick can ignore quiesce (because it's a manual kick that was submitted while spawn was quiesced)
* @return true on success
*/
public boolean swapTask(String jobUUID, int node, String replicaHostID, boolean kickOnComplete, boolean ignoreQuiesce) {
JobTask task = getTask(jobUUID, node);
if (task == null) {
log.warn("[task.swap] received null task for " + jobUUID);
return false;
}
if (!checkHostStatesForSwap(task.getJobKey(), task.getHostUUID(), replicaHostID, true)) {
log.warn("[swap.task.stopped] failed; exiting");
return false;
}
Job job;
jobLock.lock();
try {
job = getJob(jobUUID);
task.replaceReplica(replicaHostID, task.getHostUUID());
task.setHostUUID(replicaHostID);
queueJobTaskUpdateEvent(job);
} finally {
jobLock.unlock();
}
if (kickOnComplete) {
try {
scheduleTask(job, task, expandJob(job));
} catch (Exception e) {
log.warn("Warning: failed to kick task " + task.getJobKey() + " with: " + e, e);
}
}
return true;
}
/**
* Get a replacement host for a new task
*
* @param job The job for the task to be reassigned
* @return A replacement host ID
*/
private String getReplacementHost(Job job) {
List<HostState> hosts = getLiveHostsByReadOnlyStatus(job.getMinionType(), false);
for (HostState host : hosts) {
if (host.canMirrorTasks()) {
return host.getHostUuid();
}
}
return hosts.get(0).getHostUuid();
}
/**
* Given a new task, replace any hosts that are down/disabled to ensure that it can kick
*
* @param task The task to modify
* @return True if at least one host was removed
*/
private boolean replaceDownHosts(JobTask task) {
Job job = getJob(task.getJobKey());
if (job == null) {
return false;
}
HostState host = getHostState(task.getHostUUID());
boolean changed = false;
if (host == null || !host.canMirrorTasks()) {
task.setHostUUID(getReplacementHost(job));
changed = true;
}
if (task.getReplicas() != null) {
List<JobTaskReplica> tempReplicas = new ArrayList<>(task.getReplicas());
for (JobTaskReplica replica : tempReplicas) {
HostState replicaHost = getHostState(replica.getHostUUID());
if (replicaHost == null || !replicaHost.canMirrorTasks()) {
changed = true;
task.setReplicas(removeReplicasForHost(replica.getHostUUID(), task.getReplicas()));
}
}
}
if (changed) {
try {
this.rebalanceReplicas(job, false);
updateJob(job);
} catch (Exception ex) {
log.warn("Failed to sent replication message for new task " + task.getJobKey() + ": " + ex, ex);
return false;
}
}
return changed;
}
/**
* Check whether it is acceptable to swap a task between two hosts
*
* @param key The task to consider swapping
* @param liveHostID The current host for the task
* @param replicaHostID The potential target host to check
* @return True if both hosts are up and have the appropriate task directory
*/
private boolean checkHostStatesForSwap(JobKey key, String liveHostID, String replicaHostID, boolean checkTargetReplica) {
if (key == null || liveHostID == null || replicaHostID == null) {
log.warn("[task.swap] failed due to null input");
return false;
}
JobTask task = getTask(key.getJobUuid(), key.getNodeNumber());
if (task == null) {
log.warn("[task.swap] failed: nonexistent task/replicas");
return false;
}
HostState liveHost = getHostState(liveHostID);
HostState replicaHost = getHostState(replicaHostID);
if (liveHost == null || replicaHost == null || liveHost.isDead() || !liveHost.isUp() || replicaHost.isDead() || !replicaHost.isUp()) {
log.warn("[task.swap] failed due to invalid host states for " + liveHostID + "," + replicaHostID);
return false;
}
if (checkTargetReplica && !isNewTask(task)) {
if (!replicaHost.hasLive(key)) {
log.warn("[task.swap] failed because the replica host " + replicaHostID + " does not have a complete replica of task " + key);
return false;
}
}
return true;
}
/**
* Push or pull tasks off of a host to balance its load with the rest of the cluster.
*
* @param hostUUID The ID of the host
 * @return a RebalanceOutcome describing the reallocation assignments that were attempted
*/
public RebalanceOutcome rebalanceHost(String hostUUID) {
if (hostUUID == null || !monitored.containsKey(hostUUID)) {
return new RebalanceOutcome(hostUUID, "missing host", null, null);
}
HostState host = monitored.get(hostUUID);
boolean readOnly = host.isReadOnly();
log.warn("[job.reallocate] starting reallocation for host: " + hostUUID + " host is " + (readOnly ? "" : "not") + " a read only host");
List<JobTaskMoveAssignment> assignments = balancer.getAssignmentsToBalanceHost(host, getLiveHostsByReadOnlyStatus(host.getMinionTypes(), host.isReadOnly()));
executeReallocationAssignments(assignments, false);
return new RebalanceOutcome(hostUUID, null, null, Strings.join(assignments.toArray(), "\n"));
}
/**
* Sanity-check a series of task move assignments coming from SpawnBalancer, then execute the sensible ones.
*
* @param assignments The assignments to execute
* @param limitToAvailableSlots Whether movements should honor their host's availableTaskSlots count
* @return The number of tasks that were actually moved
*/
public int executeReallocationAssignments(List<JobTaskMoveAssignment> assignments, boolean limitToAvailableSlots) {
int numExecuted = 0;
if (assignments == null) {
return numExecuted;
}
HashSet<String> jobsNeedingUpdate = new HashSet<>();
HashSet<String> hostsAlreadyMovingTasks = new HashSet<>();
for (JobTaskMoveAssignment assignment : assignments) {
if (assignment.delete()) {
log.warn("[job.reallocate] deleting " + assignment.getJobKey() + " off " + assignment.getSourceUUID());
deleteTask(assignment.getJobKey().getJobUuid(), assignment.getSourceUUID(), assignment.getJobKey().getNodeNumber(), false);
deleteTask(assignment.getJobKey().getJobUuid(), assignment.getSourceUUID(), assignment.getJobKey().getNodeNumber(), true);
} else {
String sourceHostID = assignment.getSourceUUID();
String targetHostID = assignment.getTargetUUID();
HostState targetHost = getHostState(targetHostID);
if (sourceHostID == null || targetHostID == null || sourceHostID.equals(targetHostID) || targetHost == null) {
log.warn("[job.reallocate] received invalid host assignment: from " + sourceHostID + " to " + targetHostID);
continue;
}
JobTask task = getTask(assignment.getJobKey());
Job job = getJob(task.getJobUUID());
if (job == null || job.getCopyOfTasks() == null || job.getCopyOfTasks().isEmpty()) {
log.warn("[job.reallocate] invalid or empty job");
continue;
}
if (assignment.promote()) {
log.warn("[job.reallocate] promoting " + task.getJobKey() + " on " + sourceHostID);
task.setHostUUID(sourceHostID);
List<JobTaskReplica> replicasToModify = targetHost.isReadOnly() ? task.getReadOnlyReplicas() : task.getReplicas();
removeReplicasForHost(sourceHostID, replicasToModify);
replicasToModify.add(new JobTaskReplica(targetHostID, task.getJobUUID(), task.getRunCount(), 0l));
swapTask(task.getJobUUID(), task.getTaskID(), sourceHostID, false, false);
jobsNeedingUpdate.add(task.getJobUUID());
} else {
HostState liveHost = getHostState(task.getHostUUID());
if (limitToAvailableSlots && liveHost != null && (liveHost.getAvailableTaskSlots() == 0 || hostsAlreadyMovingTasks.contains(task.getHostUUID()))) {
continue;
}
hostsAlreadyMovingTasks.add(task.getHostUUID());
JobKey key = task.getJobKey();
log.warn("[job.reallocate] replicating task " + key + " onto " + targetHostID + " as " + (assignment.isFromReplica() ? "replica" : "live"));
TaskMover tm = new TaskMover(this, key, targetHostID, sourceHostID, false);
tm.execute(false);
numExecuted++;
}
}
}
for (String jobUUID : jobsNeedingUpdate) {
try {
updateJob(getJob(jobUUID));
} catch (Exception ex) {
log.warn("WARNING: failed to update job " + jobUUID + ": " + ex, ex);
}
}
return numExecuted;
}
/**
* A method to ensure all live/replicas exist where they should, and optimize their locations if all directories are correct
*
* @param jobUUID The job id to rebalance
* @param tasksToMove The number of tasks to move. If < 0, use the default.
* @return a RebalanceOutcome describing which steps were performed
* @throws Exception If there is a failure when rebalancing replicas
*/
public RebalanceOutcome rebalanceJob(String jobUUID, int tasksToMove) throws Exception {
Job job = getJob(jobUUID);
if (jobUUID == null || job == null) {
log.warn("[job.rebalance] job uuid " + jobUUID + " not found");
return new RebalanceOutcome(jobUUID, "job not found", null, null);
}
if (job.getState() != JobState.IDLE && job.getState() != JobState.DEGRADED) {
log.warn("[job.rebalance] job must be IDLE or DEGRADED to rebalance " + jobUUID);
return new RebalanceOutcome(jobUUID, "job not idle/degraded", null, null);
}
// First, make sure each task has claimed all the replicas it should have
if (!rebalanceReplicas(job)) {
log.warn("[job.rebalance] failed to fill out replica assignments for " + jobUUID);
return new RebalanceOutcome(jobUUID, "couldn't fill out replicas", null, null);
}
try {
List<JobTaskDirectoryMatch> allMismatches = new ArrayList<>();
// Check each task to see if any live/replica directories are missing or incorrectly placed
for (JobTask task : job.getCopyOfTasks()) {
List<JobTaskDirectoryMatch> directoryMismatches = matchTaskToDirectories(task, false);
if (!directoryMismatches.isEmpty()) {
// If there are issues with a task's directories, resolve them.
resolveJobTaskDirectoryMatches(job, task, directoryMismatches, false);
allMismatches.addAll(directoryMismatches);
}
}
updateJob(job);
// If any mismatches were found, skip the optimization step
if (!allMismatches.isEmpty()) {
return new RebalanceOutcome(jobUUID, null, Strings.join(allMismatches.toArray(), "\n"), null);
} else {
// If all tasks had all expected directories, consider moving some tasks to better hosts
return new RebalanceOutcome(jobUUID, null, null, Strings.join(reallocateJob(jobUUID, tasksToMove, false, false).toArray(), "\n"));
}
} catch (Exception ex) {
log.warn("[job.rebalance] exception during rebalance for " + jobUUID, ex);
return new RebalanceOutcome(jobUUID, "exception during rebalancing: " + ex, null, null);
}
}
/**
* For a particular task, ensure all live/replica copies exist where they should
*
* @param jobId The job id to fix
* @param node The task id to fix, or -1 to fix all
* @param ignoreTaskState Whether to ignore the task's state (mostly when recovering from a host failure)
* @param orphansOnly Whether to only delete orphans for idle tasks
* @return A string description
*/
public String fixTaskDir(String jobId, int node, boolean ignoreTaskState, boolean orphansOnly) {
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return "Null job";
}
int numChanged = 0;
List<JobTask> tasks = node < 0 ? job.getCopyOfTasks() : Arrays.asList(job.getTask(node));
for (JobTask task : tasks) {
boolean shouldModifyTask = !spawnJobFixer.haveRecentlyFixedTask(task.getJobKey()) &&
(ignoreTaskState || (task.getState() == JobTaskState.IDLE || (!orphansOnly && task.getState() == JobTaskState.ERROR)));
if (log.isDebugEnabled()) {
log.debug("[fixTaskDir] considering modifying task " + task.getJobKey() + " shouldModifyTask=" + shouldModifyTask);
}
if (shouldModifyTask) {
try {
numChanged += resolveJobTaskDirectoryMatches(job, task, matchTaskToDirectories(task, false), orphansOnly) ? 1 : 0;
spawnJobFixer.markTaskRecentlyFixed(task.getJobKey());
} catch (Exception ex) {
log.warn("fixTaskDir exception " + ex, ex);
return "fixTaskDir exception (see log for more details): " + ex;
}
}
}
return "Changed " + numChanged + " tasks";
} finally {
jobLock.unlock();
}
}
public boolean resolveJobTaskDirectoryMatches(Job job, JobTask task, List<JobTaskDirectoryMatch> matches, boolean deleteOrphansOnly) throws Exception {
boolean changed = false;
for (JobTaskDirectoryMatch match : matches) {
boolean resolvedMissingLive = false;
switch (match.getType()) {
case MATCH:
continue;
case MISMATCH_MISSING_LIVE:
if (deleteOrphansOnly) {
continue;
}
changed = true;
resolveMissingLive(task);
resolvedMissingLive = true; // Only need to resolve missing live once, since all replicas will be recopied
break;
case ORPHAN_LIVE:
changed = true;
sendControlMessage(new CommandTaskDelete(match.getHostId(), job.getId(), task.getTaskID(), job.getRunCount()));
break;
default:
continue;
}
if (resolvedMissingLive) {
break;
}
}
return changed;
}
/**
* Handle the case where no living host has a copy of a task. Promote a replica if there is one, or recreate the task otherwise.
*
* @param task The task to modify.
*/
private void resolveMissingLive(JobTask task) {
HostState liveHost = getHostState(task.getHostUUID());
if (liveHost != null && liveHost.hasLive(task.getJobKey())) {
copyTaskToReplicas(task);
return;
}
boolean succeeded = false;
List<JobTaskReplica> replicas = task.getReplicas();
if (replicas != null && !replicas.isEmpty()) {
HostState host;
for (JobTaskReplica replica : replicas) {
host = replica != null ? getHostState(replica.getHostUUID()) : null;
if (host != null && host.canMirrorTasks() && !host.isReadOnly() && host.hasLive(task.getJobKey())) {
log.warn("[job.rebalance] promoting host " + host.getHostUuid() + " as live for " + task.getJobKey());
task.replaceReplica(host.getHostUuid(), task.getHostUUID());
task.setHostUUID(host.getHostUuid());
copyTaskToReplicas(task);
succeeded = true;
break;
}
}
}
if (!succeeded && getHostState(task.getHostUUID()) == null) {
// If no replica is found and the host doesn't exist, we must recreate the task somewhere else.
recreateTask(task);
}
}
private void copyTaskToReplicas(JobTask task) {
sendControlMessage(new CommandTaskReplicate(task.getHostUUID(), task.getJobUUID(), task.getTaskID(), getTaskReplicaTargets(task, task.getReplicas()), null, null, false));
}
private void recreateTask(JobTask task) {
Job job = getJob(task.getJobUUID());
Map<JobTask, String> assignmentMap = balancer.assignTasksFromMultipleJobsToHosts(Arrays.asList(task), getOrCreateHostStateList(job.getMinionType(), null));
if (assignmentMap != null && assignmentMap.containsKey(task)) {
String newHostUUID = assignmentMap.get(task);
log.warn("[job.rebalance] assigning new host for " + task.getJobUUID() + ":" + task.getTaskID() + " all data on previous host will be lost");
task.setHostUUID(newHostUUID);
task.resetTaskMetrics();
} else {
log.warn("[job.rebalance] unable to assign new host for " + task.getJobUUID() + ":" + task.getTaskID() + " could not find suitable host");
}
}
public String checkTaskDirText(String jobId, int node) {
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return "NULL JOB";
}
StringBuilder sb = new StringBuilder();
List<JobTask> tasks = node < 0 ? new ArrayList<>(job.getCopyOfTasksSorted()) : Arrays.asList(job.getTask(node));
sb.append("Directory check for job " + job.getId() + "\n");
for (JobTask task : tasks) {
sb.append("Task " + task.getTaskID() + ": " + matchTaskToDirectories(task, true) + "\n");
}
return sb.toString();
} finally {
jobLock.unlock();
}
}
public JSONArray checkTaskDirJSON(String jobId, int node) {
JSONArray resultList = new JSONArray();
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return resultList;
}
List<JobTask> tasks = node < 0 ? new ArrayList<>(job.getCopyOfTasksSorted()) : Arrays.asList(job.getTask(node));
for (JobTask task : tasks) {
List<JobTaskDirectoryMatch> taskMatches = matchTaskToDirectories(task, true);
for (JobTaskDirectoryMatch taskMatch : taskMatches) {
JSONObject jsonObject = CodecJSON.encodeJSON(taskMatch);
resultList.put(jsonObject);
}
}
} catch (Exception ex) {
log.warn("Error: checking dirs for job: " + jobId + ", node: " + node);
} finally {
jobLock.unlock();
}
return resultList;
}
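/**
 * Compare a task's expected live and replica locations against what each host actually reports,
 * returning any mismatches (and the correct matches as well when includeCorrect is true).
 */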
public List<JobTaskDirectoryMatch> matchTaskToDirectories(JobTask task, boolean includeCorrect) {
List<JobTaskDirectoryMatch> rv = new ArrayList<>();
JobTaskDirectoryMatch match = checkHostForTask(task, task.getHostUUID());
if (includeCorrect || match.getType() != JobTaskDirectoryMatch.MatchType.MATCH) {
rv.add(match);
}
if (task.getAllReplicas() != null) {
for (JobTaskReplica replica : task.getAllReplicas()) {
match = checkHostForTask(task, replica.getHostUUID());
if (match.getType() != JobTaskDirectoryMatch.MatchType.MATCH) {
if (task.getState() == JobTaskState.REPLICATE || task.getState() == JobTaskState.FULL_REPLICATE) {
// If task is replicating, it will temporarily look like it's missing on the target host. Make this visible to the UI.
rv.add(new JobTaskDirectoryMatch(JobTaskDirectoryMatch.MatchType.REPLICATE_IN_PROGRESS, match.getJobKey(), match.getHostId()));
} else {
rv.add(match);
}
}
else if (includeCorrect) {
rv.add(match);
}
}
}
rv.addAll(findOrphansForTask(task));
return rv;
}
private JobTaskDirectoryMatch checkHostForTask(JobTask task, String hostID) {
JobTaskDirectoryMatch.MatchType type;
HostState host = getHostState(hostID);
if (host == null || !host.hasLive(task.getJobKey())) {
type = JobTaskDirectoryMatch.MatchType.MISMATCH_MISSING_LIVE;
} else {
type = JobTaskDirectoryMatch.MatchType.MATCH;
}
return new JobTaskDirectoryMatch(type, task.getJobKey(), hostID);
}
private List<JobTaskDirectoryMatch> findOrphansForTask(JobTask task) {
List<JobTaskDirectoryMatch> rv = new ArrayList<>();
Job job = getJob(task.getJobUUID());
if (job == null) {
log.warn("got find orphans request for missing job " + task.getJobUUID());
return rv;
}
Set<String> expectedTaskHosts = task.getAllTaskHosts();
for (HostState host : listHostStatus(job.getMinionType())) {
if (host == null || !host.isUp() || host.isDead() || host.getHostUuid().equals(task.getRebalanceTarget())) {
continue;
}
if (!expectedTaskHosts.contains(host.getHostUuid())) {
JobTaskDirectoryMatch.MatchType type = null;
if (host.hasLive(task.getJobKey()) || host.hasIncompleteReplica(task.getJobKey())) {
type = JobTaskDirectoryMatch.MatchType.ORPHAN_LIVE;
}
if (type != null) {
rv.add(new JobTaskDirectoryMatch(type, task.getJobKey(), host.getHostUuid()));
}
}
}
return rv;
}
/**
 * Moves a task from a source host to a target host.
 * If the target host already had a replica of the task, that
 * replica is removed so the task will make a new replica somewhere
 * else.
 */
private class TaskMover {
private final JobKey taskKey;
private final String targetHostUUID;
private final String sourceHostUUID;
private HostState targetHost;
private Job job;
private JobTask task;
private boolean kickOnComplete;
private boolean isMigration;
private final Spawn spawn;
TaskMover(Spawn spawn, JobKey taskKey, String targetHostUUID, String sourceHostUUID, boolean kickOnComplete) {
this.spawn = spawn;
this.taskKey = taskKey;
this.targetHostUUID = targetHostUUID;
this.sourceHostUUID = sourceHostUUID;
this.kickOnComplete = kickOnComplete;
}
public void setMigration(boolean isMigration) {
this.isMigration = isMigration;
}
public String choreWatcherKey() {
return targetHostUUID + "&&&" + taskKey;
}
private void startReplicate() throws Exception {
ReplicaTarget[] target = new ReplicaTarget[]{
new ReplicaTarget(
targetHostUUID,
targetHost.getHost(),
targetHost.getUser(),
targetHost.getPath(),
task.getReplicationFactor())
};
job.setSubmitCommand(getCommand(job.getCommand()));
JobCommand jobcmd = job.getSubmitCommand();
CommandTaskReplicate replicate = new CommandTaskReplicate(
task.getHostUUID(), task.getJobUUID(), task.getTaskID(), target, Strings.join(jobcmd.getCommand(), " "), choreWatcherKey(), true);
replicate.setRebalanceSource(sourceHostUUID);
replicate.setRebalanceTarget(targetHostUUID);
spawn.sendControlMessage(replicate);
log.warn("[task.mover] replicating job/task " + task.getJobKey() + " from " + sourceHostUUID + " onto host " + targetHostUUID);
}
public boolean execute(boolean allowQueuedTasks) {
targetHost = spawn.getHostState(targetHostUUID);
if (taskKey == null || !spawn.checkStatusForMove(targetHostUUID) || !spawn.checkStatusForMove(sourceHostUUID)) {
log.warn("[task.mover] erroneous input; terminating for: " + taskKey);
return false;
}
job = spawn.getJob(taskKey);
task = job.getTask(taskKey.getNodeNumber());
if (task == null) {
log.warn("[task.mover] failed to find job or task for: " + taskKey);
return false;
}
HostState liveHost = spawn.getHostState(task.getHostUUID());
if (liveHost == null || !liveHost.hasLive(task.getJobKey())) {
log.warn("[task.mover] failed to find live task for: " + taskKey);
return false;
}
if (!task.getHostUUID().equals(sourceHostUUID) && !task.hasReplicaOnHost(sourceHostUUID)) {
log.warn("[task.mover] failed because the task does not have a copy on the specified source: " + taskKey);
}
if (task.getAllTaskHosts().contains(targetHostUUID) || targetHost.hasLive(taskKey)) {
log.warn("[task.mover] cannot move onto a host with an existing version of task: " + taskKey);
return false;
}
if (!spawn.prepareTaskStatesForRebalance(job, task, allowQueuedTasks, isMigration)) {
log.warn("[task.mover] couldn't set task states; terminating for: " + taskKey);
return false;
}
try {
task.setRebalanceSource(sourceHostUUID);
task.setRebalanceTarget(targetHostUUID);
startReplicate();
return true;
} catch (Exception ex) {
log.warn("[task.mover] exception during replicate initiation; terminating for task: " + taskKey, ex);
task.setErrorCode(JobTaskErrorCode.EXIT_REPLICATE_FAILURE);
task.setState(JobTaskState.ERROR);
return false;
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("TaskMover");
sb.append("{taskKey=").append(taskKey);
sb.append(", targetHostUUID='").append(targetHostUUID).append('\'');
sb.append(", sourceHostUUID='").append(sourceHostUUID).append('\'');
sb.append(", job=").append(job);
sb.append(", task=").append(task);
sb.append(", kickOnComplete=").append(kickOnComplete);
sb.append('}');
return sb.toString();
}
}
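/**
 * Check whether a host exists and is up (and not dead), i.e. whether tasks may safely be moved to or from it.
 */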
public boolean checkStatusForMove(String hostID) {
HostState host = getHostState(hostID);
if (host == null) {
log.warn("[host.status] received null host for id " + hostID);
return false;
}
if (host.isDead() || !host.isUp()) {
log.warn("[host.status] host is down: " + hostID);
return false;
}
return true;
}
public boolean prepareTaskStatesForRebalance(Job job, JobTask task, boolean allowQueuedTasks, boolean isMigration) {
jobLock.lock();
try {
if (task.getState() != JobTaskState.IDLE && (!allowQueuedTasks && task.getState() != JobTaskState.QUEUED)) {
log.warn("[task.mover] decided not to move non-idle task " + task);
return false;
}
JobTaskState newState = isMigration ? JobTaskState.MIGRATING : JobTaskState.REBALANCE;
job.setTaskState(task, newState, true);
queueJobTaskUpdateEvent(job);
return true;
} finally {
jobLock.unlock();
}
}
/**
* exclude failed hosts from eligible pool
* iterate over tasks
* assemble hosts job spread across
* count replicas per host
* iterate over tasks and make reductions
* iterate over tasks and make additions
* exclude task host from replica
* assign in order of least replicas per host
* <p/>
* TODO synchronize on job
* TODO allow all cluster hosts to be considered for replicas
* TODO consider host group "rack aware" keep 1/first replica in same group
*
* @return true if rebalance was successful
*/
public boolean rebalanceReplicas(Job job) throws Exception {
// perform read/write and read only replication
return rebalanceReplicas(job, false) && rebalanceReplicas(job, true);
}
/**
* exclude failed hosts from eligible pool
* iterate over tasks
* assemble hosts job spread across
* count replicas per host
* iterate over tasks and make reductions
* iterate over tasks and make additions
* exclude task host from replica
* assign in order of least replicas per host
* <p/>
* TODO synchronize on job
* TODO allow all cluster hosts to be considered for replicas
* TODO consider host group "rack aware" keep 1/first replica in same group
*
* @param job the job to rebalance replicas
* @param taskID The task # to fill out replicas, or -1 for all tasks
* @param readOnly Whether to fill out readonly replicas or standard replicas
* @return true if rebalance was successful
*/
public boolean rebalanceReplicas(Job job, int taskID, boolean readOnly) throws Exception {
if (job == null) {
return false;
}
boolean success = true;
// Ensure that there aren't any replicas pointing towards the live host or duplicate replicas
balancer.removeInvalidReplicas(job, readOnly);
// Ask SpawnBalancer where new replicas should be sent
Map<Integer, List<String>> replicaAssignments = balancer.getAssignmentsForNewReplicas(job, taskID, readOnly);
List<JobTask> tasks = taskID > 0 ? Arrays.asList(job.getTask(taskID)) : job.getCopyOfTasks();
for (JobTask task : tasks) {
List<String> replicasToAdd = replicaAssignments.get(task.getTaskID());
// Make the new replicas as dictated by SpawnBalancer
if (readOnly) {
task.setReadOnlyReplicas(addReplicasAndRemoveExcess(task, replicasToAdd, job.getReadOnlyReplicas(), task.getReadOnlyReplicas()));
} else {
task.setReplicas(addReplicasAndRemoveExcess(task, replicasToAdd, job.getReplicas(), task.getReplicas()));
}
}
if (!readOnly) {
success = validateReplicas(job);
}
return success;
}
public boolean rebalanceReplicas(Job job, boolean readOnly) throws Exception {
return rebalanceReplicas(job, -1, readOnly);
}
/**
* check all tasks. If there are still not enough replicas, record failure.
*
* @param job - the job to validate
* @return true if the job has met its replica requirements
*/
private boolean validateReplicas(Job job) {
for (JobTask task : job.getCopyOfTasks()) {
List<JobTaskReplica> replicas = task.getReplicas();
if (job.getReplicas() > 0) {
if (replicas == null || replicas.size() < job.getReplicas()) {
HostState currHost = getHostState(task.getHostUUID());
if ((currHost == null || currHost.isDead()) && (replicas == null || replicas.size() == 0)) // If current host is dead and there are no replicas, mark degraded
{
job.setState(JobState.DEGRADED);
} else {
job.setState(JobState.ERROR); // Otherwise, just mark errored so we will know that at least one replica failed
job.setEnabled(false);
}
log.warn("[replica.add] ERROR - unable to replicate task because there are not enough suitable hosts, job: " + job.getId());
return false;
}
}
}
return true;
}
private List<JobTaskReplica> addReplicasAndRemoveExcess(JobTask task, List<String> replicaHostsToAdd,
int desiredNumberOfReplicas,
List<JobTaskReplica> currentReplicas) throws Exception {
List<JobTaskReplica> newReplicas = (currentReplicas == null ? new ArrayList<JobTaskReplica>() : new ArrayList<>(currentReplicas));
if (replicaHostsToAdd != null) {
newReplicas.addAll(replicateTask(task, replicaHostsToAdd));
}
if (!isNewTask(task)) {
while (newReplicas.size() > desiredNumberOfReplicas) {
JobTaskReplica replica = newReplicas.remove(newReplicas.size() - 1);
spawnMQ.sendControlMessage(new CommandTaskDelete(replica.getHostUUID(), task.getJobUUID(), task.getTaskID(), task.getRunCount()));
log.warn("[replica.delete] " + task.getJobUUID() + "/" + task.getTaskID() + " from " + replica.getHostUUID() + " @ " + getHostState(replica.getHostUUID()).getHost());
}
}
return newReplicas;
}
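/**
 * Create replica records for the given target hosts and ask the task's live host to replicate onto them.
 */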
protected List<JobTaskReplica> replicateTask(JobTask task, List<String> targetHosts) {
List<JobTaskReplica> newReplicas = new ArrayList<>();
for (String targetHostUUID : targetHosts) {
JobTaskReplica replica = new JobTaskReplica();
replica.setHostUUID(targetHostUUID);
replica.setJobUUID(task.getJobUUID());
newReplicas.add(replica);
}
Job job = getJob(task.getJobUUID());
JobCommand jobcmd = job.getSubmitCommand();
String command = (jobcmd != null && jobcmd.getCommand() != null) ? Strings.join(jobcmd.getCommand(), " ") : null;
spawnMQ.sendControlMessage(new CommandTaskReplicate(task.getHostUUID(), task.getJobUUID(), task.getTaskID(), getTaskReplicaTargets(task, newReplicas), command, null, false));
log.warn("[replica.add] " + task.getJobUUID() + "/" + task.getTaskID() + " to " + targetHosts);
return newReplicas;
}
private void updateJobDependencies(IJob job) {
String jobId = job.getId();
DirectedGraph<String> dependencies = spawnState.jobDependencies;
Set<String> sources = dependencies.getSourceEdges(jobId);
if (sources != null) {
for (String source : sources) {
dependencies.removeEdge(source, jobId);
}
} else {
dependencies.addNode(jobId);
}
Set<String> newSources = this.getDataSources(jobId);
if (newSources != null) {
for (String source : newSources) {
dependencies.addEdge(source, jobId);
}
}
}
/**
* Submit a config update to the job store
*
* @param jobId The job to submit
* @param commitMessage If specified, the commit message to use
*/
public void submitConfigUpdate(String jobId, String commitMessage) {
Job job;
if (jobId == null || jobId.isEmpty() || (job = getJob(jobId)) == null) {
return;
}
if (jobStore != null) {
jobStore.submitConfigUpdate(job.getId(), job.getOwner(), getJobConfig(jobId), commitMessage);
}
}
public void updateJob(IJob ijob) throws Exception {
updateJob(ijob, true);
}
/**
* requires 'job' to be a different object from the one in cache. make sure
* to clone() any job fetched from cache before submitting to updateJob().
*/
public void updateJob(IJob ijob, boolean reviseReplicas) throws Exception {
if (useZk) {
Job job = new Job(ijob);
jobLock.lock();
try {
require(getJob(job.getId()) != null, "job " + job.getId() + " does not exist");
updateJobDependencies(job);
Job oldjob = putJobInSpawnState(job);
// take action on trigger changes (like # replicas)
if (oldjob != job && reviseReplicas) {
int oldReplicaCount = oldjob.getReplicas();
int newReplicaCount = job.getReplicas();
require(oldReplicaCount == newReplicaCount || job.getState() == JobState.IDLE || job.getState() == JobState.DEGRADED, "job must be IDLE or DEGRADED to change replicas");
require(newReplicaCount < monitored.size(), "replication factor must be < # live hosts");
rebalanceReplicas(job);
}
sendJobUpdateEvent(job);
} finally {
jobLock.unlock();
}
}
}
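    // Illustrative usage of updateJob (editor's sketch, not from the original source): callers are
    // expected to clone a cached job before mutating and resubmitting it, roughly:
    //
    //   Job copy = new Job(spawn.getJob(jobId));   // defensive copy of the cached job
    //   copy.setReplicas(copy.getReplicas() + 1);  // hypothetical change that triggers a replica revision
    //   spawn.updateJob(copy);                     // replaces the cached job and, here, rebalances replicas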
public void putAlert(String alertId, JobAlert alert) {
jobAlertRunner.putAlert(alertId, alert);
}
public void removeAlert(String alertId) {
jobAlertRunner.removeAlert(alertId);
}
public JSONArray fetchAllAlertsArray() {
return jobAlertRunner.getAlertStateArray();
}
public JSONObject fetchAllAlertsMap() {
return jobAlertRunner.getAlertStateMap();
}
public String getAlert(String alertId) {
return jobAlertRunner.getAlert(alertId);
}
public static enum DeleteStatus {
SUCCESS, JOB_MISSING, JOB_DO_NOT_DELETE
}
public DeleteStatus deleteJob(String jobUUID) throws Exception {
jobLock.lock();
try {
Job job = getJob(jobUUID);
if (job == null) {
return DeleteStatus.JOB_MISSING;
}
if (job.getDontDeleteMe()) {
return DeleteStatus.JOB_DO_NOT_DELETE;
}
spawnState.jobs.remove(jobUUID);
spawnState.jobDependencies.removeNode(jobUUID);
log.warn("[job.delete] " + job.getId() + " >> " + job.getCopyOfTasks());
spawnMQ.sendControlMessage(new CommandTaskDelete(HostMessage.ALL_HOSTS, job.getId(), null, job.getRunCount()));
sendJobUpdateEvent("job.delete", job);
if (jobConfigManager != null) {
jobConfigManager.deleteJob(job.getId());
}
if (jobStore != null) {
jobStore.delete(jobUUID);
}
return DeleteStatus.SUCCESS;
} finally {
jobLock.unlock();
}
}
public void sendControlMessage(HostMessage hostMessage) {
spawnMQ.sendControlMessage(hostMessage);
}
/**
     * Deletes a job's task on a specific host only, which is useful when there are replicas and
     * a task has been migrated to another host
*
* @param jobUUID The job to delete
* @param hostUuid The host where the delete message should be sent
* @param node The specific task to be deleted
* @param isReplica Whether the task to be deleted is a replica or a live
* @return True if the task is successfully removed
*/
public boolean deleteTask(String jobUUID, String hostUuid, Integer node, boolean isReplica) {
jobLock.lock();
try {
if (jobUUID == null || node == null) {
return false;
}
log.warn("[job.delete.host] " + hostUuid + "/" + jobUUID + " >> " + node);
spawnMQ.sendControlMessage(new CommandTaskDelete(hostUuid, jobUUID, node, 0));
Job job = getJob(jobUUID);
if (isReplica && job != null) {
JobTask task = job.getTask(node);
task.setReplicas(removeReplicasForHost(hostUuid, task.getReplicas()));
task.setReadOnlyReplicas(removeReplicasForHost(hostUuid, task.getReadOnlyReplicas()));
queueJobTaskUpdateEvent(job);
}
return true;
} finally {
jobLock.unlock();
}
}
private List<JobTaskReplica> removeReplicasForHost(String hostUuid, List<JobTaskReplica> currentReplicas) {
if (currentReplicas == null || currentReplicas.size() == 0) {
return new ArrayList<>();
}
List<JobTaskReplica> replicasCopy = new ArrayList<>(currentReplicas);
Iterator<JobTaskReplica> iterator = replicasCopy.iterator();
while (iterator.hasNext()) {
JobTaskReplica replica = iterator.next();
if (replica.getHostUUID().equals(hostUuid)) {
iterator.remove();
}
}
return replicasCopy;
}
/**
     * The entry point for requests to start every task from a job (for example, from the UI).
*
* @param jobUUID Job ID
* @param isManualKick Whether the task came from the interface, which is given special treatment during quiesce
* @throws Exception
*/
public void startJob(String jobUUID, boolean isManualKick) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
require(job.isEnabled(), "job disabled");
require(scheduleJob(job, isManualKick), "unable to schedule job");
sendJobUpdateEvent(job);
}
public String expandJob(String jobUUID) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
return expandJob(job);
}
public String expandJob(Job job) throws TokenReplacerOverflowException {
return expandJob(job.getId(), job.getParameters(), getJobConfig(job.getId()));
}
public boolean moveTask(String jobID, int node, String sourceUUID, String targetUUID) {
if (sourceUUID == null || targetUUID == null || sourceUUID.equals(targetUUID)) {
log.warn("[task.move] fail: invalid input " + sourceUUID + "," + targetUUID);
return false;
}
TaskMover tm = new TaskMover(this, new JobKey(jobID, node), targetUUID, sourceUUID, false);
log.warn("[task.move] attempting move for " + jobID + " / " + node);
return tm.execute(false);
}
public String expandJob(String id, Collection<JobParameter> parameters, String rawConfig)
throws TokenReplacerOverflowException {
// macro recursive expansion
String pass0 = JobExpand.macroExpand(this, rawConfig);
// template in params that "may" contain other macros
String pass1 = JobExpand.macroTemplateParams(pass0, parameters);
// macro recursive expansion again
String pass2 = JobExpand.macroExpand(this, pass1);
// replace remaining params not caught in pass 1
String pass3 = JobExpand.macroTemplateParams(pass2, parameters);
// inject job metadata from spawn
return JobExpand.magicMacroExpand(this, pass3, id);
}
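    // Illustrative expansion order (editor's note, not from the original source): for a raw config
    // that references a macro such as "%{my-macro}%" plus job parameters, the passes above
    // (1) inline macro bodies, (2) substitute supplied parameters, (3) inline any macros those
    // parameters introduced, (4) fill remaining parameter tokens, and finally inject
    // spawn-provided job metadata via magicMacroExpand. The exact token syntax is owned by JobExpand.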
public void stopJob(String jobUUID) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
for (JobTask task : job.getCopyOfTasks()) {
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
}
stopTask(jobUUID, task.getTaskID());
}
job.setHadMoreData(false);
}
public void killJob(String jobUUID) throws Exception {
boolean success = false;
        while (!success && !shuttingDown.get()) {
try {
jobLock.lock();
if (taskQueuesByPriority.tryLock()) {
success = true;
Job job = getJob(jobUUID);
require(job != null, "job not found");
for (JobTask task : job.getCopyOfTasks()) {
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
}
killTask(jobUUID, task.getTaskID());
}
job.setHadMoreData(false);
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
}
}
/**
     * Unlike jobs, the returned task is not a clone, because there is no updater for tasks.
     * Admittedly there is no clean symmetry here; it could use cleanup.
*/
public JobTask getTask(String jobUUID, int taskID) {
Job job = getJob(jobUUID);
if (job != null) {
return job.getTask(taskID);
}
return null;
}
public JobTask getTask(JobKey jobKey) {
if (jobKey == null || jobKey.getJobUuid() == null || jobKey.getNodeNumber() == null) {
return null;
}
return getTask(jobKey.getJobUuid(), jobKey.getNodeNumber());
}
/**
     * The entry point for requests to start tasks (for example, from the UI). Does some checking, and ultimately
* kicks the task or adds it to the task queue as appropriate
*
* @param jobUUID Job ID
* @param taskID Node #
* @param addToQueue Whether the task should be added to the queue (false if the task is already on the queue)
* @param isManualKick Whether the task came from the interface, which is given special treatment during quiesce
* @param toQueueHead Whether to add the task to the head of the queue rather than the end
* @throws Exception When the task is invalid or already active
*/
public void startTask(String jobUUID, int taskID, boolean addToQueue, boolean isManualKick, boolean toQueueHead) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
require(job.isEnabled(), "job is disabled");
require(job.getState() != JobState.DEGRADED, "job in degraded state");
require(taskID >= 0, "invalid task id");
JobTask task = getTask(jobUUID, taskID);
require(task != null, "no such task");
require(task.getState() != JobTaskState.BUSY && task.getState() != JobTaskState.ALLOCATED &&
task.getState() != JobTaskState.QUEUED, "invalid task state");
if (addToQueue) {
addToTaskQueue(task.getJobKey(), isManualKick && quiesce, toQueueHead);
} else {
kickIncludingQueue(job, task, expandJob(job), false, isManualKick && quiesce);
}
log.warn("[task.kick] started " + job.getId() + " / " + task.getTaskID() + " = " + job.getDescription());
queueJobTaskUpdateEvent(job);
}
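    // Illustrative call (editor's sketch): queue node 3 of a job at the tail of its priority queue,
    // treating it as a manual kick from the UI:
    //
    //   spawn.startTask(jobId, 3, true, true, false);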
public void stopTask(String jobUUID, int taskID) throws Exception {
stopTask(jobUUID, taskID, false, false);
}
private void stopTask(String jobUUID, int taskID, boolean force, boolean onlyIfQueued) throws Exception {
Job job = getJob(jobUUID);
JobTask task = getTask(jobUUID, taskID);
if (job != null && task != null) {
taskQueuesByPriority.setStoppedJob(true); // Terminate the current queue iteration cleanly
HostState host = getHostState(task.getHostUUID());
if (force) {
task.setRebalanceSource(null);
task.setRebalanceTarget(null);
}
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
log.warn("[task.stop] stopping queued " + task.getJobKey());
} else if (task.getState() == JobTaskState.REBALANCE) {
log.warn("[task.stop] stopping rebalancing " + task.getJobKey() + " with force=" + force);
            } else if (task.getState() == JobTaskState.MIGRATING) {
log.warn("[task.stop] stopping migrating " + task.getJobKey());
task.setRebalanceSource(null);
task.setRebalanceTarget(null);
            } else if (force && (task.getState() == JobTaskState.REVERT)) {
log.warn("[task.stop] " + task.getJobKey() + " killed in state " + task.getState());
int code = JobTaskErrorCode.EXIT_REVERT_FAILURE;
job.errorTask(task, code);
queueJobTaskUpdateEvent(job);
} else if (force && (host == null || host.isDead() || !host.isUp())) {
log.warn("[task.stop] " + task.getJobKey() + " killed on down host");
job.errorTask(task, 1);
queueJobTaskUpdateEvent(job);
// Host is unreachable; bail once the task is errored.
return;
} else if (host != null && !host.hasLive(task.getJobKey())) {
log.warn("[task.stop] node that minion doesn't think is running: " + task.getJobKey());
job.setTaskState(task, JobTaskState.IDLE);
queueJobTaskUpdateEvent(job);
            } else if (task.getState() == JobTaskState.ALLOCATED) {
log.warn("[task.stop] node in allocated state " + jobUUID + "/" + taskID + " host = " + (host != null ? host.getHost() : "unknown"));
}
// The following is called regardless of task state, unless the host is nonexistent/failed
if (host != null) {
spawnMQ.sendControlMessage(new CommandTaskStop(host.getHostUuid(), jobUUID, taskID, job.getRunCount(), force, onlyIfQueued));
} else {
log.warn("[task.stop]" + jobUUID + "/" + taskID + "]: no host monitored for uuid " + task.getHostUUID());
}
} else {
log.warn("[task.stop]" + jobUUID + "]: no nodes");
}
}
protected boolean removeFromQueue(JobTask task) {
boolean removed = false;
Job job = getJob(task.getJobUUID());
if (job != null) {
log.warn("[taskQueuesByPriority] setting " + task.getJobKey() + " as idle and removing from queue");
job.setTaskState(task, JobTaskState.IDLE);
removed = taskQueuesByPriority.remove(job.getPriority(), task.getJobKey());
queueJobTaskUpdateEvent(job);
sendTaskQueueUpdateEvent();
}
writeSpawnQueue();
return removed;
}
public void killTask(String jobUUID, int taskID) throws Exception {
stopTask(jobUUID, taskID, true, false);
}
public void revertJobOrTask(String jobUUID, int taskID, String backupType, int rev, long time) throws Exception {
if (taskID == -1) {
// Revert entire job
Job job = getJob(jobUUID);
int numTasks = job.getTaskCount();
for (int i = 0; i < numTasks; i++) {
log.warn("[task.revert] " + jobUUID + "/" + i);
revert(jobUUID, backupType, rev, time, i);
}
} else {
// Revert single task
log.warn("[task.revert] " + jobUUID + "/" + taskID);
revert(jobUUID, backupType, rev, time, taskID);
}
}
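    // Illustrative usage (editor's note): passing taskID == -1 reverts every task in the job, while a
    // non-negative taskID reverts only that node; valid backupType values are deployment-specific and
    // are not enumerated here.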
private void revert(String jobUUID, String backupType, int rev, long time, int taskID) throws Exception {
JobTask task = getTask(jobUUID, taskID);
if (task != null) {
task.setPreFailErrorCode(0);
HostState host = getHostState(task.getHostUUID());
if (task.getState() == JobTaskState.ALLOCATED || task.getState() == JobTaskState.QUEUED) {
log.warn("[task.revert] node in allocated state " + jobUUID + "/" + task.getTaskID() + " host = " + host.getHost());
}
log.warn("[task.revert] sending revert message to host: " + host.getHost() + "/" + host.getHostUuid());
spawnMQ.sendControlMessage(new CommandTaskRevert(host.getHostUuid(), jobUUID, task.getTaskID(), backupType, rev, time, getTaskReplicaTargets(task, task.getAllReplicas()), false));
} else {
log.warn("[task.revert] task " + jobUUID + "/" + taskID + "] not found");
}
}
public Collection<String> listCommands() {
synchronized (spawnState.commands) {
return spawnState.commands.keySet();
}
}
public JobCommand getCommand(String key) {
synchronized (spawnState.commands) {
return spawnState.commands.get(key);
}
}
public void putCommand(String key, JobCommand command, boolean store) throws Exception {
synchronized (spawnState.commands) {
spawnState.commands.put(key, command);
}
if (useZk && store) {
spawnDataStore.putAsChild(SPAWN_COMMON_COMMAND_PATH, key, new String(codec.encode(command)));
}
}
public boolean deleteCommand(String key) throws Exception {
/* prevent deletion of commands used in jobs */
for (Job job : listJobs()) {
if (job.getCommand() != null && job.getCommand().equals(key)) {
return false;
}
}
synchronized (spawnState.commands) {
JobCommand cmd = spawnState.commands.remove(key);
if (cmd != null) {
spawnDataStore.deleteChild(SPAWN_COMMON_COMMAND_PATH, key);
return true;
} else {
return false;
}
}
}
public Collection<String> listMacros() {
synchronized (spawnState.macros) {
return spawnState.macros.keySet();
}
}
public JobMacro getMacro(String key) {
synchronized (spawnState.macros) {
return spawnState.macros.get(key.trim());
}
}
public void putMacro(String key, JobMacro macro, boolean store) throws Exception {
key = key.trim();
synchronized (spawnState.macros) {
spawnState.macros.put(key, macro);
}
if (store) {
spawnDataStore.putAsChild(SPAWN_COMMON_MACRO_PATH, key, new String(codec.encode(macro)));
}
}
public boolean deleteMacro(String key) {
/* prevent deletion of macros used in job configs */
for (Job job : listJobs()) {
String rawconf = getJobConfig(job.getId());
if (rawconf != null && rawconf.contains("%{" + key + "}%")) {
return false;
}
}
synchronized (spawnState.macros) {
JobMacro macro = spawnState.macros.remove(key);
if (macro != null) {
spawnDataStore.deleteChild(SPAWN_COMMON_MACRO_PATH, key);
return true;
} else {
return false;
}
}
}
// --------------------- END API ----------------------
private List<HostState> getOrCreateHostStateList(String minionType, Collection<String> hostList) {
List<HostState> hostStateList;
if (hostList == null || hostList.size() == 0) {
hostStateList = balancer.sortHostsByActiveTasks(listHostStatus(minionType));
} else {
hostStateList = new ArrayList<>();
for (String hostId : hostList) {
hostStateList.add(getHostState(hostId));
}
}
return hostStateList;
}
/**
* mq message dispatch
*/
protected void handleMessage(CoreMessage core) {
Job job;
JobTask task;
if (deadMinionMembers.getMemberSet().contains(core.getHostUuid())) {
log.warn("[mq.core] ignoring message from host: " + core.getHostUuid() + " because it is dead");
return;
}
switch (core.getMessageType()) {
default:
log.warn("[mq.core] unhandled type = " + core.getMessageType());
break;
case CMD_TASK_NEW:
// ignore these replication-related messages sent by minions
break;
case STATUS_HOST_INFO:
Set<String> upMinions = minionMembers.getMemberSet();
HostState state = (HostState) core;
HostState oldState = getHostState(state.getHostUuid());
if (oldState == null) {
log.warn("[host.status] from unmonitored " + state.getHostUuid() + " = " + state.getHost() + ":" + state.getPort());
}
boolean hostEnabled = true;
synchronized (disabledHosts) {
if (disabledHosts.contains(state.getHost()) || disabledHosts.contains(state.getHostUuid())) {
hostEnabled = false;
state.setDisabled(true);
} else {
state.setDisabled(false);
}
}
// Propagate minion state for ui
if (upMinions.contains(state.getHostUuid()) && hostEnabled) {
state.setUp(true);
}
state.setUpdated();
sendHostUpdateEvent(state);
updateHostState(state);
break;
case STATUS_TASK_BEGIN:
StatusTaskBegin begin = (StatusTaskBegin) core;
tasksStartedPerHour.mark();
if (debug("-begin-")) {
log.info("[task.begin] :: " + begin.getJobKey());
}
try {
job = getJob(begin.getJobUuid());
if (job == null) {
log.warn("[task.begin] on dead job " + begin.getJobKey() + " from " + begin.getHostUuid());
break;
}
if (job.getStartTime() == null) {
job.setStartTime(System.currentTimeMillis());
}
JobTask node = null;
for (JobTask jobNode : job.getCopyOfTasks()) {
if (jobNode.getTaskID() == begin.getNodeID()) {
node = jobNode;
break;
}
}
if (node != null) {
job.setTaskState(node, JobTaskState.BUSY);
node.incrementStarts();
queueJobTaskUpdateEvent(job);
} else {
log.warn("[task.begin] done report for missing node " + begin.getJobKey());
}
} catch (Exception ex) {
log.warn("", ex);
}
break;
case STATUS_TASK_CANT_BEGIN:
StatusTaskCantBegin cantBegin = (StatusTaskCantBegin) core;
log.info("[task.cantbegin] received cantbegin from " + cantBegin.getHostUuid() + " for task " + cantBegin.getJobUuid() + "," + cantBegin.getNodeID());
job = getJob(cantBegin.getJobUuid());
task = getTask(cantBegin.getJobUuid(), cantBegin.getNodeID());
if (job != null && task != null) {
try {
job.setTaskState(task, JobTaskState.IDLE);
log.info("[task.cantbegin] kicking " + task.getJobKey());
startTask(cantBegin.getJobUuid(), cantBegin.getNodeID(), true, true, true);
} catch (Exception ex) {
log.warn("[task.schedule] failed to reschedule task for " + task.getJobKey(), ex);
}
} else {
log.warn("[task.cantbegin] received cantbegin from " + cantBegin.getHostUuid() + " for nonexistent job " + cantBegin.getJobUuid());
}
break;
case STATUS_TASK_PORT:
StatusTaskPort port = (StatusTaskPort) core;
job = getJob(port.getJobUuid());
task = getTask(port.getJobUuid(), port.getNodeID());
if (task != null) {
log.info("[task.port] " + job.getId() + "/" + task.getTaskID() + " @ " + port.getPort());
task.setPort(port.getPort());
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_BACKUP:
StatusTaskBackup backup = (StatusTaskBackup) core;
job = getJob(backup.getJobUuid());
task = getTask(backup.getJobUuid(), backup.getNodeID());
if (task != null && task.getState() != JobTaskState.REBALANCE && task.getState() != JobTaskState.MIGRATING) {
log.info("[task.backup] " + job.getId() + "/" + task.getTaskID());
job.setTaskState(task, JobTaskState.BACKUP);
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REPLICATE:
StatusTaskReplicate replicate = (StatusTaskReplicate) core;
job = getJob(replicate.getJobUuid());
task = getTask(replicate.getJobUuid(), replicate.getNodeID());
if (task != null) {
log.info("[task.replicate] " + job.getId() + "/" + task.getTaskID());
JobTaskState taskState = task.getState();
if (taskState != JobTaskState.REBALANCE && taskState != JobTaskState.MIGRATING) {
job.setTaskState(task, replicate.isFullReplication() ? JobTaskState.FULL_REPLICATE : JobTaskState.REPLICATE, true);
}
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REVERT:
StatusTaskRevert revert = (StatusTaskRevert) core;
job = getJob(revert.getJobUuid());
task = getTask(revert.getJobUuid(), revert.getNodeID());
if (task != null) {
log.info("[task.revert] " + job.getId() + "/" + task.getTaskID());
job.setTaskState(task, JobTaskState.REVERT, true);
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REPLICA:
StatusTaskReplica replica = (StatusTaskReplica) core;
job = getJob(replica.getJobUuid());
task = getTask(replica.getJobUuid(), replica.getNodeID());
if (task != null) {
for (JobTaskReplica taskReplica : task.getReplicas()) {
if (taskReplica.getHostUUID().equals(replica.getHostUuid())) {
taskReplica.setVersion(replica.getVersion());
taskReplica.setLastUpdate(replica.getUpdateTime());
}
}
log.info("[task.replica] version updated for " + job.getId() + "/" + task.getTaskID() + " ver " + task.getRunCount() + "/" + replica.getVersion());
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_END:
StatusTaskEnd update = (StatusTaskEnd) core;
log.info("[task.end] :: " + update.getJobUuid() + "/" + update.getNodeID() + " exit=" + update.getExitCode());
tasksCompletedPerHour.mark();
taskQueuesByPriority.markHostAvailable(update.getHostUuid());
try {
job = getJob(update.getJobUuid());
if (job == null) {
log.warn("[task.end] on dead job " + update.getJobKey() + " from " + update.getHostUuid());
break;
}
task = getTask(update.getJobUuid(), update.getNodeID());
if (task.getHostUUID() != null && !task.getHostUUID().equals(update.getHostUuid())) {
log.warn("[task.end] received from incorrect host " + update.getHostUuid());
break;
}
handleStatusTaskEnd(job, task, update);
} catch (Exception ex) {
log.warn("Failed to handle end message: " + ex, ex);
}
break;
}
}
/**
* Handle the various actions in response to a StatusTaskEnd sent by a minion
*
* @param job The job to modify
* @param task The task to modify
* @param update The message
*/
private void handleStatusTaskEnd(Job job, JobTask task, StatusTaskEnd update) {
TaskExitState exitState = update.getExitState();
boolean more = exitState != null && exitState.hadMoreData();
boolean wasStopped = exitState != null && exitState.getWasStopped();
task.setFileCount(update.getFileCount());
task.setByteCount(update.getByteCount());
boolean errored = update.getExitCode() != 0 && update.getExitCode() != JobTaskErrorCode.REBALANCE_PAUSE;
if (update.getRebalanceSource() != null) {
handleRebalanceFinish(job, task, update);
} else {
if (exitState != null) {
task.setInput(exitState.getInput());
task.setMeanRate(exitState.getMeanRate());
task.setTotalEmitted(exitState.getTotalEmitted());
}
if (more) {
job.setHadMoreData(more);
}
task.setWasStopped(wasStopped);
}
if (errored) {
handleTaskError(job, task, update.getExitCode());
} else {
job.setTaskFinished(task);
}
if (job.isFinished() && update.getRebalanceSource() == null) {
finishJob(job, errored);
}
queueJobTaskUpdateEvent(job);
}
public void handleTaskError(Job job, JobTask task, int exitCode) {
log.warn("[task.end] " + task.getJobKey() + " exited abnormally with " + exitCode);
task.incrementErrors();
try {
spawnJobFixer.fixTask(job, task, exitCode);
} catch (Exception ex) {
job.errorTask(task, exitCode);
}
}
public void handleRebalanceFinish(Job job, JobTask task, StatusTaskEnd update) {
String rebalanceSource = update.getRebalanceSource();
String rebalanceTarget = update.getRebalanceTarget();
if (update.getExitCode() == 0) {
// Rsync succeeded. Swap to the new host, assuming it is still healthy.
task.setRebalanceSource(null);
task.setRebalanceTarget(null);
if (checkHostStatesForSwap(task.getJobKey(), rebalanceSource, rebalanceTarget, false)) {
if (task.getHostUUID().equals(rebalanceSource)) {
task.setHostUUID(rebalanceTarget);
} else {
task.replaceReplica(rebalanceSource, update.getRebalanceTarget());
}
deleteTask(job.getId(), rebalanceSource, task.getTaskID(), false);
} else {
// The hosts returned by end message were not found, or weren't in a usable state.
fixTaskDir(job.getId(), task.getTaskID(), true, true);
}
} else if (update.getExitCode() == JobTaskErrorCode.REBALANCE_PAUSE) {
// Rebalancing was paused. No special action necessary.
log.warn("[task.move] task rebalance for " + task.getJobKey() + " paused until next run");
} else {
// The rsync failed. Clean up the extra task directory.
fixTaskDir(job.getId(), task.getTaskID(), true, true);
}
}
private void doOnState(Job job, String url, String state) {
if (Strings.isEmpty(url)) {
return;
}
if (url.startsWith("http://")) {
try {
quietBackgroundPost(state + " " + job.getId(), url, codec.encode(job));
} catch (Exception e) {
log.warn("", e);
}
} else if (url.startsWith("kick://")) {
Map<String, List<String>> aliasMap = getAliases();
for (String kick : Strings.splitArray(url.substring(7), ",")) {
kick = kick.trim();
List<String> aliases = aliasMap.get(kick);
if (aliases != null) {
for (String alias : aliases) {
safeStartJob(alias.trim());
}
} else {
safeStartJob(kick);
}
}
} else {
log.warn("invalid onState url: " + url + " for " + job.getId());
}
}
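    // Illustrative onComplete/onError values (editor's note, not from the original source): an
    // "http://..." URL receives the encoded job as a background POST, while a value such as
    // "kick://someJobIdOrAlias1,someJobIdOrAlias2" starts each listed job (aliases are resolved
    // through getAliases()) once this job reaches the corresponding state.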
private void safeStartJob(String uuid) {
try {
startJob(uuid, false);
} catch (Exception ex) {
log.warn("[safe.start] " + uuid + " failed due to " + ex);
}
}
/**
* Perform cleanup tasks once per job completion. Triggered when the last running task transitions to an idle state.
     * In particular: perform any onComplete/onError triggers, set the end time, and possibly do a fixdirs.
     *
     * @param job The job that just finished
* @param errored Whether the job ended up in error state
*/
private void finishJob(Job job, boolean errored) {
log.info("[job.done] " + job.getId() + " :: errored=" + errored + ". callback=" + job.getOnCompleteURL());
jobsCompletedPerHour.mark();
job.setFinishTime(System.currentTimeMillis());
spawnFormattedLogger.finishJob(job);
if (!quiesce) {
if (!errored) {
/* rekick if any task had more work to do */
if (job.hadMoreData()) {
log.warn("[job.done] " + job.getId() + " :: rekicking on more data");
try {
scheduleJob(job, false);
} catch (Exception ex) {
log.warn("", ex);
}
} else {
doOnState(job, job.getOnCompleteURL(), "onComplete");
if (ENABLE_JOB_FIXDIRS_ONCOMPLETE && job.getRunCount() > 1) {
// Perform a fixDirs on completion, cleaning up missing replicas/orphans.
fixTaskDir(job.getId(), -1, false, true);
}
}
} else {
doOnState(job, job.getOnErrorURL(), "onError");
}
}
balancer.requestJobSizeUpdate(job.getId(), 0);
}
private void quietBackgroundPost(String threadName, final String url, final byte[] post) {
new Thread(threadName) {
public void run() {
try {
HttpUtil.httpPost(url, "javascript/text", post, 60000);
} catch (Exception ex) {
log.warn("", ex);
}
}
}.start();
}
/**
     * Simple wrapper around Runtime.exec() that logs the command and its output when debug is enabled
*/
private int exec(String cmd[]) throws InterruptedException, IOException {
if (debug("-exec-")) {
log.info("[exec.cmd] " + Strings.join(cmd, " "));
}
Process proc = Runtime.getRuntime().exec(cmd);
InputStream in = proc.getInputStream();
String buf = Bytes.toString(Bytes.readFully(in));
if (debug("-exec-") && buf.length() > 0) {
String lines[] = Strings.splitArray(buf, "\n");
for (String line : lines) {
log.info("[exec.out] " + line);
}
}
in = proc.getErrorStream();
buf = Bytes.toString(Bytes.readFully(in));
if (debug("-exec-") && buf.length() > 0) {
String lines[] = Strings.splitArray(buf, "\n");
for (String line : lines) {
log.info("[exec.err] " + line);
}
}
int exit = proc.waitFor();
if (debug("-exec-")) {
log.info("[exec.exit] " + exit);
}
return exit;
}
/**
     * Returns true if debug output is enabled for the given match token (or for "-all-")
*/
private boolean debug(String match) {
return debug != null && (debug.contains(match) || debug.contains("-all-"));
}
@VisibleForTesting
JobMacro createJobHostMacro(String job, int port) {
String sPort = Integer.valueOf(port).toString();
Set<String> jobHosts = new TreeSet<>();// best set?
jobLock.lock();
try {
Collection<HostState> hosts = listHostStatus(null);
Map<String, String> uuid2Host = new HashMap<>();
for (HostState host : hosts) {
if (host.isUp()) {
uuid2Host.put(host.getHostUuid(), host.getHost());
}
}
if (uuid2Host.size() == 0) {
log.warn("[createJobHostMacro] warning job was found on no available hosts: " + job);
}
IJob ijob = getJob(job);
if (ijob == null) {
log.warn("[createJobHostMacro] Unable to get job config for job: " + job);
throw new RuntimeException("[createJobHostMacro] Unable to get job config for job: " + job);
}
for (JobTask task : ijob.getCopyOfTasks()) {
String host = uuid2Host.get(task.getHostUUID());
if (host != null) {
jobHosts.add(host);
}
}
} finally {
jobLock.unlock();
}
List<String> hostStrings = new ArrayList<>();
for (String host : jobHosts) {
hostStrings.add("{host:\"" + host + "\", port:" + sPort + "}");
}
return new JobMacro("spawn", "createJobHostMacro-" + job, Joiner.on(',').join(hostStrings));
}
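    // Illustrative output (editor's sketch): for a job whose live tasks sit on up hosts "h1" and
    // "h2" with port 1234, the generated macro body would be roughly
    //   {host:"h1", port:1234},{host:"h2", port:1234}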
// TODO: 1. Why is this not in SpawnMQ? 2. Who actually listens to job config changes
// TODO: answer: this is for the web ui and live updating via SpawnManager /listen.batch
/**
* send job update event to registered listeners (usually http clients)
*/
private void sendJobUpdateEvent(Job job) {
jobLock.lock();
try {
if (jobConfigManager != null) {
jobConfigManager.updateJob(job);
}
} finally {
jobLock.unlock();
}
sendJobUpdateEvent("job.update", job);
}
public void queueJobTaskUpdateEvent(Job job) {
jobLock.lock();
try {
jobUpdateQueue.add(job.getId());
} finally {
jobLock.unlock();
}
}
public void drainJobTaskUpdateQueue() {
long start = System.currentTimeMillis();
Set<String> jobIds = new HashSet<String>();
jobUpdateQueue.drainTo(jobIds);
if (jobIds.size() > 0) {
if (log.isTraceEnabled()) {
log.trace("[drain] Draining " + jobIds.size() + " jobs from the update queue");
}
for (String jobId : jobIds) {
Job job = getJob(jobId);
sendJobUpdateEvent(job);
}
if (log.isTraceEnabled()) {
log.trace("[drain] Finished Draining " + jobIds.size() + " jobs from the update queue in " + (System.currentTimeMillis() - start) + "ms");
}
}
}
public void sendJobUpdateEvent(String label, Job job) {
try {
sendEventToClientListeners(label, getJobUpdateEvent(job));
} catch (Exception e) {
log.warn("", e);
}
}
/**
     * This method adds a cluster.quiesce event to be sent to clientListeners to notify those using the UI that the
     * cluster's quiesce state has changed.
     *
     * @param username The user who changed the quiesce state
*/
public void sendClusterQuiesceEvent(String username) {
try {
boolean quiesce = getSettings().getQuiesced();
JSONObject info = new JSONObject();
info.put("username", username);
info.put("date", JitterClock.globalTime());
info.put("quiesced", quiesce);
log.info("User " + username + " has " + (quiesce ? "quiesced" : "unquiesed") + " the cluster.");
sendEventToClientListeners("cluster.quiesce", info);
} catch (Exception e) {
log.warn("", e);
}
}
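    // Illustrative "cluster.quiesce" payload (editor's sketch): roughly
    //   {"username":"someUser","date":<millis>,"quiesced":true}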
/**
* Adds the task.queue.size event to be sent to clientListeners on next batch.listen update
*/
public void sendTaskQueueUpdateEvent() {
try {
int numQueued = 0;
int numQueuedWaitingOnError = 0;
LinkedList<JobKey>[] queues = null;
taskQueuesByPriority.lock();
try {
//noinspection unchecked
queues = taskQueuesByPriority.values().toArray(new LinkedList[taskQueuesByPriority.size()]);
for (LinkedList<JobKey> queue : queues) {
numQueued += queue.size();
for (JobKey key : queue) {
Job job = getJob(key);
if (job != null && !job.isEnabled()) {
numQueuedWaitingOnError += 1;
}
}
}
lastQueueSize = numQueued;
} finally {
taskQueuesByPriority.unlock();
}
JSONObject json = new JSONObject("{'size':" + Integer.toString(numQueued) + ",'sizeErr':" + Integer.toString(numQueuedWaitingOnError) + "}");
sendEventToClientListeners("task.queue.size", json);
} catch (Exception e) {
log.warn("[task.queue.update] received exception while sending task queue update event (this is ok unless it happens repeatedly) " + e);
e.printStackTrace();
}
}
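    // Illustrative "task.queue.size" payload (editor's sketch):
    //   {"size":<queued tasks>,"sizeErr":<queued tasks whose job is disabled>}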
public int getLastQueueSize() {
return lastQueueSize;
}
public JSONObject getJobUpdateEvent(IJob job) throws Exception {
long files = 0;
long bytes = 0;
int running = 0;
int errored = 0;
int done = 0;
if (job == null) {
String errMessage = "getJobUpdateEvent called with null job";
log.warn(errMessage);
throw new Exception(errMessage);
}
List<JobTask> jobNodes = job.getCopyOfTasks();
int numNodes = 0;
if (jobNodes != null) {
numNodes = jobNodes.size();
for (JobTask task : jobNodes) {
files += task.getFileCount();
bytes += task.getByteCount();
if (task.getState() != JobTaskState.ALLOCATED && task.getState() != JobTaskState.QUEUED) {
running++;
}
switch (task.getState()) {
case IDLE:
done++;
break;
case ERROR:
done++;
errored++;
break;
}
}
}
JSONObject ojob = job.toJSON().put("config", "").put("parameters", "");
ojob.put("nodes", numNodes);
ojob.put("running", running);
ojob.put("errored", errored);
ojob.put("done", done);
ojob.put("files", files);
ojob.put("bytes", bytes);
return ojob;
}
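    // Illustrative result (editor's sketch): the returned object is the job's JSON with "config"
    // and "parameters" blanked out, plus aggregate fields along the lines of
    //   {"nodes":<task count>,"running":...,"errored":...,"done":...,"files":...,"bytes":...}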
protected void sendHostUpdateEvent(HostState state) {
sendHostUpdateEvent("host.update", state);
}
private void sendHostUpdateEvent(String label, HostState state) {
try {
sendEventToClientListeners(label, getHostStateUpdateEvent(state));
} catch (Exception e) {
log.warn("", e);
}
}
public JSONObject getHostStateUpdateEvent(HostState state) throws Exception {
if (state == null) {
return null;
}
JSONObject ohost = CodecJSON.encodeJSON(state);
ohost.put("spawnState", getSpawnStateString(state));
ohost.put("stopped", ohost.getJSONArray("stopped").length());
ohost.put("total", state.countTotalLive());
double score = 0;
try {
score = balancer.getHostScoreCached(state.getHostUuid());
} catch (NullPointerException npe) {
log.warn("[host.status] exception in getHostStateUpdateEvent", npe);
}
ohost.put("score", score);
return ohost;
}
private String getSpawnStateString(HostState state) {
if (state.isDead()) {
return "failed";
} else if (state.isDisabled()) {
return "disabled";
}
return hostFailWorker.getFailureStateString(state.getHostUuid(), state.isUp());
}
/**
* send codable message to registered listeners as json
*/
private void sendEventToClientListeners(final String topic, final JSONObject message) {
long time = System.currentTimeMillis();
for (Entry<String, ClientEventListener> ev : listeners.entrySet()) {
ClientEventListener client = ev.getValue();
// drop listeners we haven't heard from in a while
if (time - client.lastSeen > 60000) {
ClientEventListener listener = listeners.remove(ev.getKey());
if (debug("-listen-")) {
log.warn("[listen] dropping listener queue for " + ev.getKey() + " = " + listener);
}
continue;
}
try {
client.events.put(new ClientEvent(topic, message));
} catch (Exception ex) {
log.warn("", ex);
}
}
webSocketManager.addEvent(new ClientEvent(topic, message));
}
private class UpdateEventRunnable implements Runnable {
private final Map<String, Long> events = new HashMap<>();
@Override
public void run() {
HostCapacity hostmax = new HostCapacity();
HostCapacity hostused = new HostCapacity();
synchronized (monitored) {
for (HostState hs : monitored.values()) {
hostmax.add(hs.getMax());
hostused.add(hs.getUsed());
}
}
int jobshung = 0;
int jobrunning = 0;
int jobscheduled = 0;
int joberrored = 0;
int taskallocated = 0;
int taskbusy = 0;
int taskerrored = 0;
int taskqueued = 0;
long files = 0;
long bytes = 0;
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
for (JobTask jn : job.getCopyOfTasks()) {
switch (jn.getState()) {
case ALLOCATED:
taskallocated++;
break;
case BUSY:
taskbusy++;
break;
case ERROR:
taskerrored++;
break;
case IDLE:
break;
case QUEUED:
taskqueued++;
break;
}
files += jn.getFileCount();
bytes += jn.getByteCount();
}
switch (job.getState()) {
case IDLE:
break;
case RUNNING:
jobrunning++;
if (job.getStartTime() != null && job.getMaxRunTime() != null &&
(JitterClock.globalTime() - job.getStartTime() > job.getMaxRunTime() * 2)) {
jobshung++;
}
break;
case SCHEDULED:
jobscheduled++;
break;
}
if (job.getState() == JobState.ERROR) {
joberrored++;
}
}
} finally {
jobLock.unlock();
}
events.clear();
events.put("time", System.currentTimeMillis());
events.put("hosts", (long) monitored.size());
events.put("commands", (long) spawnState.commands.size());
events.put("macros", (long) spawnState.macros.size());
events.put("jobs", (long) spawnState.jobs.size());
events.put("cpus", (long) hostmax.getCpu());
events.put("cpus_used", (long) hostused.getCpu());
events.put("mem", (long) hostmax.getMem());
events.put("mem_used", (long) hostused.getMem());
events.put("io", (long) hostmax.getIo());
events.put("io_used", (long) hostused.getIo());
events.put("jobs_running", (long) jobrunning);
events.put("jobs_scheduled", (long) jobscheduled);
events.put("jobs_errored", (long) joberrored);
events.put("jobs_hung", (long) jobshung);
events.put("tasks_busy", (long) taskbusy);
events.put("tasks_allocated", (long) taskallocated);
events.put("tasks_queued", (long) taskqueued);
events.put("tasks_errored", (long) taskerrored);
events.put("files", files);
events.put("bytes", bytes);
spawnFormattedLogger.periodicState(events);
runningTaskCount.set(taskbusy);
queuedTaskCount.set(taskqueued);
failTaskCount.set(taskerrored);
runningJobCount.set(jobrunning);
queuedJobCount.set(jobscheduled);
failJobCount.set(joberrored);
hungJobCount.set(jobshung);
}
}
private void require(boolean test, String msg) throws Exception {
if (!test) {
throw new Exception("test failed with '" + msg + "'");
}
}
/**
     * Called by the shutdown-hook thread registered with the Runtime, triggered when the JVM is asked to terminate
*/
void runtimeShutdownHook() {
shuttingDown.set(true);
try {
drainJobTaskUpdateQueue();
hostFailWorker.stop();
} catch (Exception ex) {
log.warn("", ex);
}
try {
if (spawnFormattedLogger != null) {
spawnFormattedLogger.close();
}
} catch (Exception ex) {
log.warn("", ex);
}
try {
closeZkClients();
} catch (Exception ex) {
log.warn("", ex);
}
}
/**
* re-kicks jobs which are on a repeating schedule
*/
private class JobRekickTask implements Runnable {
public void run() {
boolean kicked;
do {
kicked = false;
/*
* cycle through jobs and look for those that need nodes
* allocated. lock to prevent other RPCs from conflicting with scheduling.
*/
try {
if (!quiesce) {
String jobids[] = null;
jobLock.lock();
try {
jobids = new String[spawnState.jobs.size()];
jobids = spawnState.jobs.keySet().toArray(jobids);
} finally {
jobLock.unlock();
}
long clock = System.currentTimeMillis();
for (String jobid : jobids) {
Job job = getJob(jobid);
if (job == null) {
log.warn("ERROR: missing job for id " + jobid);
continue;
}
if (job.getState() == JobState.IDLE && job.getStartTime() == null && job.getEndTime() == null) {
job.setEndTime(clock);
}
// check for recurring jobs (that aren't already running)
if (job.shouldAutoRekick(clock)) {
try {
if (scheduleJob(job, false)) {
log.info("[schedule] rekicked " + job.getId());
kicked = true;
}
} catch (Exception ex) {
log.warn("[schedule] ex while rekicking, disabling " + job.getId());
job.setEnabled(false);
updateJob(job);
throw new Exception(ex);
}
}
}
}
} catch (Exception ex) {
log.warn("auto rekick failed: ", ex);
}
        } while (kicked);
}
}
protected void autobalance() {
autobalance(SpawnBalancer.RebalanceType.HOST, SpawnBalancer.RebalanceWeight.HEAVY);
}
protected void autobalance(SpawnBalancer.RebalanceType type, SpawnBalancer.RebalanceWeight weight) {
executeReallocationAssignments(balancer.getAssignmentsForAutoBalance(type, weight), false);
}
private boolean schedulePrep(Job job) {
job.setSubmitCommand(getCommand(job.getCommand()));
if (job.getSubmitCommand() == null) {
log.warn("[schedule] failed submit : invalid command " + job.getCommand());
return false;
}
return job.isEnabled();
}
private ReplicaTarget[] getTaskReplicaTargets(JobTask task, List<JobTaskReplica> replicaList) {
ReplicaTarget replicas[] = null;
if (replicaList != null) {
int next = 0;
replicas = new ReplicaTarget[replicaList.size()];
for (JobTaskReplica replica : replicaList) {
HostState host = getHostState(replica.getHostUUID());
if (host == null) {
log.warn("[getTaskReplicaTargets] error - replica host: " + replica.getHostUUID() + " does not exist!");
throw new RuntimeException("[getTaskReplicaTargets] error - replica host: " + replica.getHostUUID() + " does not exist. Rebalance the job to correct issue");
}
replicas[next++] = new ReplicaTarget(host.getHostUuid(), host.getHost(), host.getUser(), host.getPath(), task.getReplicationFactor());
}
}
return replicas;
}
/**
* Attempt to kick a task. Add it to the queue instead if appropriate.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for the job
* @param inQueue Whether the task is already in the queue (in which case we shouldn't add it again)
* @param ignoreQuiesce Whether the task can kick regardless of Spawn's quiesce state
* @throws Exception If there is a problem scheduling the task
*/
private void kickIncludingQueue(Job job, JobTask task, String config, boolean inQueue, boolean ignoreQuiesce) throws Exception {
boolean success = false;
while (!success && !shuttingDown.get()) {
jobLock.lock();
try {
if (taskQueuesByPriority.tryLock()) {
success = true;
boolean kicked = kickOnExistingHosts(job, task, config, 0L, true, ignoreQuiesce);
if (!kicked && !inQueue) {
addToTaskQueue(task.getJobKey(), ignoreQuiesce, false);
}
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
}
}
/**
* Schedule every task from a job.
*
* @param job Job to kick
* @param isManualKick If the kick is coming from the UI, which is specially allowed to run during quiesce
* @return True if the job is scheduled successfully
* @throws Exception If there is a problem scheduling a task
*/
private boolean scheduleJob(Job job, boolean isManualKick) throws Exception {
if (!schedulePrep(job)) {
return false;
}
job.setSubmitTime(JitterClock.globalTime());
job.setStartTime(null);
job.setEndTime(null);
job.setHadMoreData(false);
job.incrementRunCount();
log.info("[job.schedule] assigning " + job.getId() + " with " + job.getCopyOfTasks().size() + " tasks");
jobsStartedPerHour.mark();
for (JobTask task : job.getCopyOfTasks()) {
if (task == null || task.getState() != JobTaskState.IDLE) {
continue;
}
addToTaskQueue(task.getJobKey(), isManualKick && quiesce, false);
}
updateJob(job);
return true;
}
/* helper for SpawnMesh */
CommandTaskKick getCommandTaskKick(Job job, JobTask task) {
JobCommand jobCmd = job.getSubmitCommand();
final String expandedJob;
try {
expandedJob = expandJob(job);
} catch (TokenReplacerOverflowException e) {
return null;
}
CommandTaskKick kick = new CommandTaskKick(
task.getHostUUID(),
task.getJobKey(),
job.getPriority(),
job.getCopyOfTasks().size(),
job.getMaxRunTime() != null ? job.getMaxRunTime() * 60000 : 0,
job.getRunCount(),
expandedJob,
Strings.join(jobCmd.getCommand(), " "),
Strings.isEmpty(job.getKillSignal()) ? null : job.getKillSignal(),
job.getHourlyBackups(),
job.getDailyBackups(),
job.getWeeklyBackups(),
job.getMonthlyBackups(),
getTaskReplicaTargets(task, task.getAllReplicas())
);
kick.setRetries(job.getRetries());
return kick;
}
public class ScheduledTaskKick implements Runnable {
public String jobId;
public Collection<JobParameter> jobParameters;
public String jobConfig;
public String rawJobConfig;
public SpawnMQ spawnMQ;
public CommandTaskKick kick;
public Job job;
public JobTask task;
public ScheduledTaskKick(String jobId, Collection<JobParameter> jobParameters, String jobConfig, String rawJobConfig, SpawnMQ spawnMQ, CommandTaskKick kick, Job job, JobTask task) {
this.jobId = jobId;
this.jobParameters = jobParameters;
this.jobConfig = jobConfig;
this.rawJobConfig = rawJobConfig;
this.spawnMQ = spawnMQ;
this.kick = kick;
this.job = job;
this.task = task;
}
public void run() {
try {
if (jobConfig == null) {
jobConfig = expandJob(jobId, jobParameters, rawJobConfig);
}
kick.setConfig(jobConfig);
spawnMQ.sendJobMessage(kick);
if (debug("-task-")) {
log.info("[task.schedule] assigned " + jobId + "[" + kick.getNodeID() + "/" + (kick.getJobNodes() - 1) + "] to " + kick.getHostUuid());
}
} catch (Exception e) {
log.warn("failed to kick job " + jobId + " task " + kick.getNodeID() + " on host " + kick.getHostUuid() + ":\n" + e);
jobLock.lock();
try {
job.errorTask(task, JobTaskErrorCode.KICK_ERROR);
} finally {
jobLock.unlock();
}
}
}
}
/**
* Send a start message to a minion.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for job
* @return True if the start message is sent successfully
* @throws Exception If there is a problem scheduling a task
*/
public boolean scheduleTask(Job job, JobTask task, String config) throws Exception {
if (!schedulePrep(job)) {
return false;
}
if (task.getState() != JobTaskState.IDLE && task.getState() != JobTaskState.ERROR && task.getState() != JobTaskState.QUEUED) {
return false;
}
JobState oldState = job.getState();
if (!job.setTaskState(task, JobTaskState.ALLOCATED)) {
return false;
}
if (oldState == JobState.IDLE && job.getRunCount() <= task.getRunCount()) {
job.incrementRunCount();
job.setEndTime(null);
}
task.setRunCount(job.getRunCount());
task.setErrorCode(0);
task.setPreFailErrorCode(0);
JobCommand jobcmd = job.getSubmitCommand();
if (task.getRebalanceSource() != null && task.getRebalanceTarget() != null) {
// If a rebalance was stopped cleanly, resume it.
if (new TaskMover(this, task.getJobKey(), task.getRebalanceTarget(), task.getRebalanceSource(), false).execute(true)) {
return true;
}
}
CommandTaskKick kick = new CommandTaskKick(
task.getHostUUID(),
new JobKey(job.getId(), task.getTaskID()),
job.getPriority(),
job.getCopyOfTasks().size(),
job.getMaxRunTime() != null ? job.getMaxRunTime() * 60000 : 0,
job.getRunCount(),
null,
Strings.join(jobcmd.getCommand(), " "),
Strings.isEmpty(job.getKillSignal()) ? null : job.getKillSignal(),
job.getHourlyBackups(),
job.getDailyBackups(),
job.getWeeklyBackups(),
job.getMonthlyBackups(),
getTaskReplicaTargets(task, task.getAllReplicas())
);
kick.setRetries(job.getRetries());
// Creating a runnable to expand the job and send kick message outside of the main queue-iteration thread.
// Reason: the jobLock is held for duration of the queue-iteration and expanding some (kafka) jobs can be very
// slow. By making job expansion non-blocking we prevent other (UI) threads from waiting on zookeeper.
// Note: we make a copy of job id, parameters to ignore modifications from outside the queue-iteration thread
ArrayList<JobParameter> jobParameters = new ArrayList<>(); // deep clone of JobParameter list
for (JobParameter parameter : job.getParameters()) {
jobParameters.add(new JobParameter(parameter.getName(), parameter.getValue(), parameter.getDefaultValue()));
}
ScheduledTaskKick scheduledKick = new ScheduledTaskKick(job.getId(), jobParameters, config, getJobConfig(job.getId()), spawnMQ, kick, job, task);
expandKickExecutor.submit(scheduledKick);
return true;
}
/**
* Helper function for kickOnExistingHosts.
*
* @param task A task, typically one that is about to be kicked
* @return a List of HostStates from the task, either live or replica,
* that are unable to support a task kick (down, read-only, or scheduled to be failed)
*/
private List<HostState> hostsBlockingTaskKick(JobTask task) {
List<HostState> unavailable = new ArrayList<>();
HostState liveHost = getHostState(task.getHostUUID());
if (shouldBlockTaskKick(liveHost)) {
unavailable.add(liveHost);
}
List<JobTaskReplica> replicas = (task.getReplicas() != null ? task.getReplicas() : new ArrayList<JobTaskReplica>());
for (JobTaskReplica replica : replicas) {
HostState replicaHost = getHostState(replica.getHostUUID());
if (shouldBlockTaskKick(replicaHost)) {
unavailable.add(replicaHost);
}
}
return unavailable;
}
private boolean shouldBlockTaskKick(HostState host) {
return host == null || !host.canMirrorTasks() || host.isReadOnly() ||
hostFailWorker.getFailureState(host.getHostUuid()) == HostFailWorker.FailState.FAILING_FS_DEAD;
}
/**
* Attempt to find a host that has the capacity to run a task. Try the live host first, then any replica hosts,
* swapping onto them only if one is available and if allowed to do so.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for job
* @param timeOnQueue Time that the task has been on the queue
* @param allowSwap Whether to allow swapping to replica hosts
* @param ignoreQuiesce Whether any kicks that occur can ignore Spawn's quiesce state
* @return True if some host had the capacity to run the task and the task was sent there; false otherwise
* @throws Exception If there is a problem during task scheduling
*/
public boolean kickOnExistingHosts(Job job, JobTask task, String config, long timeOnQueue, boolean allowSwap, boolean ignoreQuiesce) throws Exception {
if (job == null || !job.isEnabled() ||
(job.getMaxSimulRunning() > 0 && job.getCountActiveTasks() >= job.getMaxSimulRunning())) {
return false;
}
boolean isNewTask = isNewTask(task);
List<HostState> unavailableHosts = hostsBlockingTaskKick(task);
if (isNewTask && !unavailableHosts.isEmpty()) {
boolean changed = replaceDownHosts(task);
if (changed) {
return false; // Reconsider the task the next time through the queue
}
}
if (!unavailableHosts.isEmpty()) {
log.warn("[taskQueuesByPriority] cannot kick " + task.getJobKey() + " because one or more of its hosts is down or scheduled to be failed: " + unavailableHosts.toString());
return false;
}
HostState liveHost = getHostState(task.getHostUUID());
if (liveHost.canMirrorTasks() && !liveHost.isReadOnly() && taskQueuesByPriority.shouldKickTaskOnHost(liveHost.getHostUuid())) {
taskQueuesByPriority.markHostKick(liveHost.getHostUuid(), false);
scheduleTask(job, task, config);
log.info("[taskQueuesByPriority] sending " + task.getJobKey() + " to " + task.getHostUUID());
return true;
} else if (allowSwap && !job.getDontAutoBalanceMe()) {
            return attemptKickTaskUsingSwap(job, task, isNewTask, ignoreQuiesce, timeOnQueue);
}
return false;
}
/**
* Attempt to kick a task under the assumption that the live host is unavailable.
*
* @param job The job to kick
* @param task The task to kick
* @param isNewTask Whether the task is new and has no existing data to move
* @param ignoreQuiesce Whether this task kick should ignore the quiesce state
* @param timeOnQueue How long the task has been on the queue
* @return True if the task was kicked
* @throws Exception
*/
private boolean attemptKickTaskUsingSwap(Job job, JobTask task, boolean isNewTask, boolean ignoreQuiesce, long timeOnQueue) throws Exception {
if (isNewTask) {
HostState host = findHostWithAvailableSlot(task, listHostStatus(job.getMinionType()), false);
if (host != null && swapTask(job.getId(), task.getTaskID(), host.getHostUuid(), true, ignoreQuiesce)) {
taskQueuesByPriority.markHostKick(host.getHostUuid(), false);
log.info("[taskQueuesByPriority] swapping " + task.getJobKey() + " onto " + host.getHostUuid());
return true;
}
return false;
} else if (task.getReplicas() != null) {
List<HostState> replicaHosts = new ArrayList<>();
for (JobTaskReplica replica : task.getReplicas()) {
replicaHosts.add(getHostState(replica.getHostUUID()));
}
HostState availReplica = findHostWithAvailableSlot(task, replicaHosts, false);
if (availReplica != null && swapTask(job.getId(), task.getTaskID(), availReplica.getHostUuid(), true, ignoreQuiesce)) {
taskQueuesByPriority.markHostKick(availReplica.getHostUuid(), true);
log.info("[taskQueuesByPriority] swapping " + task.getJobKey() + " onto " + availReplica.getHostUuid());
return true;
}
}
if (taskQueuesByPriority.isMigrationEnabled() && !job.getQueryConfig().getCanQuery() && !job.getDontAutoBalanceMe()) {
return attemptMigrateTask(job, task, timeOnQueue);
}
return false;
}
/**
* Select a host that can run a task
*
* @param task The task being moved
* @param hosts A collection of hosts
* @param forMigration Whether the host in question is being used for migration
* @return A suitable host that has an available task slot, if one exists; otherwise, null
*/
private HostState findHostWithAvailableSlot(JobTask task, List<HostState> hosts, boolean forMigration) {
if (hosts == null) {
return null;
}
for (HostState host : hosts) {
if (host == null || (forMigration && hostFailWorker.getFailureState(host.getHostUuid()) != HostFailWorker.FailState.ALIVE)) {
// Don't migrate onto hosts that are being failed in any capacity
continue;
}
if (host.canMirrorTasks() && !host.isReadOnly() && balancer.canReceiveNewTasks(host, false) &&
taskQueuesByPriority.shouldKickTaskOnHost(host.getHostUuid()) &&
(!forMigration || taskQueuesByPriority.shouldMigrateTaskToHost(task, host.getHostUuid()))) {
return host;
}
}
return null;
}
/**
* Consider migrating a task to a new host and run it there, subject to a limit on the overall number of such migrations
* to do per time interval and how many bytes are allowed to be migrated.
*
* @param job The job for the task to kick
* @param task The task to kick
* @param timeOnQueue How long the task has been on the queue
* @return True if the task was migrated
*/
private boolean attemptMigrateTask(Job job, JobTask task, long timeOnQueue) {
HostState target;
        // Migrate only if spawn is not quiesced, the task is small enough that migration is
        // sensible, and there is a host with available capacity that can run the job.
        if (!quiesce &&
                taskQueuesByPriority.checkSizeAgeForMigration(task.getByteCount(), timeOnQueue) &&
                (target = findHostWithAvailableSlot(task, listHostStatus(job.getMinionType()), true)) != null) {
// Migrate the task to the target host and kick it on completion
log.warn("Migrating " + task.getJobKey() + " to " + target.getHostUuid());
taskQueuesByPriority.markMigrationBetweenHosts(task.getHostUUID(), target.getHostUuid());
taskQueuesByPriority.markHostKick(target.getHostUuid(), true);
TaskMover tm = new TaskMover(this, task.getJobKey(), target.getHostUuid(), task.getHostUUID(), true);
tm.setMigration(true);
tm.execute(true);
return true;
}
return false;
}
protected boolean isNewTask(JobTask task) {
HostState liveHost = getHostState(task.getHostUUID());
return liveHost != null && !liveHost.hasLive(task.getJobKey()) && task.getFileCount() == 0 && task.getByteCount() == 0;
}
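    // Editor's note: a task counts as "new" when its (non-null) live host does not report the task
    // as live and the task has recorded zero files and zero bytes, i.e. there is no existing data to move.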
/**
* Add a jobkey to the appropriate task queue, given its priority
*
* @param jobKey The jobkey to add
     * @param ignoreQuiesce Whether the task can kick regardless of Spawn's quiesce state
     * @param toHead Whether to add the task to the head of the queue rather than the end
     */
public void addToTaskQueue(JobKey jobKey, boolean ignoreQuiesce, boolean toHead) {
Job job = getJob(jobKey.getJobUuid());
JobTask task = getTask(jobKey.getJobUuid(), jobKey.getNodeNumber());
if (job != null && task != null) {
if (balancer.hasFullDiskHost(task)) {
log.warn("[task.queue] task " + task.getJobKey() + " cannot run because one of its hosts has a full disk");
job.setTaskState(task, JobTaskState.DISK_FULL);
queueJobTaskUpdateEvent(job);
} else if (task.getState() == JobTaskState.QUEUED || job.setTaskState(task, JobTaskState.QUEUED)) {
log.info("[taskQueuesByPriority] adding " + jobKey + " to queue with ignoreQuiesce=" + ignoreQuiesce);
taskQueuesByPriority.addTaskToQueue(job.getPriority(), jobKey, ignoreQuiesce, toHead);
queueJobTaskUpdateEvent(job);
sendTaskQueueUpdateEvent();
} else {
log.warn("[task.queue] failed to add task " + jobKey + " with state " + task.getState());
}
}
}
/**
* Iterate over each queue looking for jobs that can run. By design, the queues are processed in descending order
* of priority, so we try priority 2 tasks before priority 1, etc.
*/
public void kickJobsOnQueue() {
LinkedList[] queues = null;
boolean success = false;
while (!success && !shuttingDown.get()) {
// need the job lock first
jobLock.lock();
try {
if (taskQueuesByPriority.tryLock()) {
success = true;
taskQueuesByPriority.setStoppedJob(false);
taskQueuesByPriority.updateAllHostAvailSlots(listHostStatus(null));
queues = taskQueuesByPriority.values().toArray(new LinkedList[taskQueuesByPriority.size()]);
for (LinkedList<SpawnQueueItem> queue : queues) {
iterateThroughTaskQueue(queue);
}
sendTaskQueueUpdateEvent();
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
if (!success) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
}
}
}
}
/**
* Iterate over a particular queue of same-priority tasks, kicking any that can run.
     * Must be called while the taskQueuesByPriority lock is held.
*
* @param queue The queue to look over
*/
private void iterateThroughTaskQueue(LinkedList<SpawnQueueItem> queue) {
ListIterator<SpawnQueueItem> iter = queue.listIterator(0);
int skippedQuiesceCount = 0;
long now = System.currentTimeMillis();
        // Terminate if we run out of tasks or a job was stopped, which requires a queue modification
        while (iter.hasNext() && !taskQueuesByPriority.getStoppedJob()) {
SpawnQueueItem key = iter.next();
Job job = getJob(key.getJobUuid());
JobTask task = getTask(key.getJobUuid(), key.getNodeNumber());
try {
boolean kicked;
if (job == null || task == null || task.getState() != JobTaskState.QUEUED) {
log.warn("[task.queue] removing invalid task " + key);
iter.remove();
continue;
}
if (quiesce && !key.getIgnoreQuiesce()) {
skippedQuiesceCount++;
if (log.isDebugEnabled()) {
log.debug("[task.queue] skipping " + key + " because spawn is quiesced and the kick wasn't manual");
}
continue;
} else {
kicked = kickOnExistingHosts(job, task, null, now - key.getCreationTime(), true, key.getIgnoreQuiesce());
}
if (kicked) {
log.info("[task.queue] removing kicked task " + task.getJobKey());
iter.remove();
}
} catch (Exception ex) {
log.warn("[task.queue] received exception during task kick: ", ex);
if (task != null && job != null) {
job.errorTask(task, JobTaskErrorCode.KICK_ERROR);
iter.remove();
queueJobTaskUpdateEvent(job);
}
}
}
if (skippedQuiesceCount > 0) {
log.warn("[task.queue] skipped " + skippedQuiesceCount + " queued tasks because spawn is quiesced and the kick wasn't manual");
}
}
/**
* browser polling event listener
*/
public static class ClientEventListener {
public long lastSeen;
public LinkedBlockingQueue<ClientEvent> events = new LinkedBlockingQueue<ClientEvent>();
}
/**
* event queued to a browser ClientListener
*/
public static final class ClientEvent implements Codec.Codable {
private String topic;
private JSONObject message;
public ClientEvent(String topic, JSONObject message) {
this.topic = topic;
this.message = message;
}
public String topic() {
return topic;
}
public JSONObject message() {
return message;
}
public JSONObject toJSON() throws Exception {
return new JSONObject().put("topic", topic).put("message", message);
}
@Override
public boolean equals(Object o) {
if (o instanceof ClientEvent) {
ClientEvent ce = (ClientEvent) o;
return ce.topic == topic && ce.message == message;
}
return false;
}
@Override
public int hashCode() {
return message.hashCode();
}
}
public WebSocketManager getWebSocketManager() {
return this.webSocketManager;
}
public boolean areAlertsEnabled() {
String alertsEnabled = null;
try {
alertsEnabled = spawnDataStore.get(SPAWN_COMMON_ALERT_PATH);
} catch (Exception ex) {
log.warn("Unable to read alerts status due to : " + ex.getMessage());
}
return alertsEnabled == null || alertsEnabled.equals("") || alertsEnabled.equals("true");
}
public void disableAlerts() throws Exception {
spawnDataStore.put(SPAWN_COMMON_ALERT_PATH, "false");
this.jobAlertRunner.disableAlerts();
}
public void enableAlerts() throws Exception {
spawnDataStore.put(SPAWN_COMMON_ALERT_PATH, "true");
this.jobAlertRunner.enableAlerts();
}
public List<String> getJobsToAutobalance() {
List<String> rv = new ArrayList<String>();
List<Job> autobalanceJobs = balancer.getJobsToAutobalance(listHostStatus(null));
if (autobalanceJobs == null) {
return rv;
}
for (Job job : autobalanceJobs) {
if (job.getId() != null) {
rv.add(job.getId());
}
}
return rv;
}
public long getTaskTrueSize(String jobId, int node) {
return balancer.getTaskTrueSize(getTask(jobId, node));
}
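    /**
     * Enable or disable a comma-separated list of hosts, updating the disabled-host set and
     * persisting the change via writeState().
     *
     * @param hosts   Comma-separated host names or UUIDs
     * @param disable True to disable the hosts, false to re-enable them
     */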
public void toggleHosts(String hosts, boolean disable) {
if (hosts != null) {
String[] hostsArray = hosts.split(",");
for (String host : hostsArray) {
if (host.isEmpty()) {
continue;
}
boolean changed;
synchronized (disabledHosts) {
changed = disable ? disabledHosts.add(host) : disabledHosts.remove(host);
}
if (changed) {
updateToggledHosts(host, disable);
}
}
writeState();
}
}
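    /**
     * Propagate an enable/disable change to every known HostState whose hostname or UUID matches
     * the given id, notifying listeners and refreshing the stored host state.
     */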
public void updateToggledHosts(String id, boolean disable) {
for (HostState host : listHostStatus(null)) {
if (id.equals(host.getHost()) || id.equals(host.getHostUuid())) {
host.setDisabled(disable);
sendHostUpdateEvent(host);
updateHostState(host);
}
}
}
/**
* simple settings wrapper allows changes to Spawn
*/
public class Settings {
public String getDebug() {
return debug;
}
public void setDebug(String debug) {
Spawn.this.debug = debug;
writeState();
}
public String getQueryHost() {
return queryHost;
}
public String getSpawnHost() {
return spawnHost;
}
public void setQueryHost(String queryHost) {
Spawn.this.queryHost = queryHost;
writeState();
}
public void setSpawnHost(String spawnHost) {
Spawn.this.spawnHost = spawnHost;
writeState();
}
public boolean getQuiesced() {
return quiesce;
}
public void setQuiesced(boolean quiesced) {
quiesceCount.clear();
if (quiesced) {
quiesceCount.inc();
}
Spawn.this.quiesce = quiesced;
writeState();
}
public String getDisabled() {
synchronized (disabledHosts) {
return Strings.join(disabledHosts.toArray(), ",");
}
}
public void setDisabled(String disabled) {
synchronized (disabledHosts) {
disabledHosts.clear();
disabledHosts.addAll(Arrays.asList(disabled.split(",")));
}
}
public JSONObject toJSON() throws JSONException {
return new JSONObject().put("debug", debug).put("quiesce", quiesce).put("queryHost", queryHost).put("spawnHost", spawnHost).put("disabled", getDisabled());
}
}
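    // Illustrative usage (an assumption, not taken from the original source): callers obtain the
    // wrapper through getSettings(); setters persist immediately, e.g.
    //   spawn.getSettings().setQuiesced(true);   // bumps the quiesce counter and writes state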
public void updateSpawnBalancerConfig(SpawnBalancerConfig newConfig) {
spawnState.balancerConfig = newConfig;
balancer.setConfig(newConfig);
}
public void writeSpawnBalancerConfig() {
try {
spawnDataStore.put(SPAWN_BALANCE_PARAM_PATH, new String(codec.encode(spawnState.balancerConfig)));
} catch (Exception e) {
log.warn("Warning: failed to persist SpawnBalancer parameters: ", e);
}
}
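    /**
     * Restore the SpawnBalancer configuration previously persisted to the data store, if any;
     * decoding failures are logged and the existing configuration is kept.
     */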
protected final void loadSpawnBalancerConfig() {
String configString = spawnDataStore.get(SPAWN_BALANCE_PARAM_PATH);
if (configString != null && !configString.isEmpty()) {
SpawnBalancerConfig loadedConfig = new SpawnBalancerConfig();
try {
codec.decode(loadedConfig, configString.getBytes());
updateSpawnBalancerConfig(loadedConfig);
} catch (Exception e) {
log.warn("Warning: failed to decode SpawnBalancerConfig: ", e);
}
}
}
public SpawnState getSpawnState() {
return spawnState;
}
public SpawnDataStore getSpawnDataStore() {
return spawnDataStore;
}
@VisibleForTesting
protected static class SpawnState implements Codec.Codable {
final ConcurrentMap<String, JobMacro> macros = new ConcurrentHashMapV8<>();
final ConcurrentMap<String, JobCommand> commands = new ConcurrentHashMapV8<>();
final ConcurrentMap<String, Job> jobs = new ConcurrentHashMapV8<>();
        final DirectedGraph<String> jobDependencies = new DirectedGraph<>();
SpawnBalancerConfig balancerConfig = new SpawnBalancerConfig();
}
}
| hydra-main/src/main/java/com/addthis/hydra/job/Spawn.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.job;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedSet;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.text.ParseException;
import com.addthis.basis.net.HttpUtil;
import com.addthis.basis.util.Bytes;
import com.addthis.basis.util.Files;
import com.addthis.basis.util.JitterClock;
import com.addthis.basis.util.Parameter;
import com.addthis.basis.util.Strings;
import com.addthis.basis.util.TokenReplacerOverflowException;
import com.addthis.bark.ZkClientFactory;
import com.addthis.bark.ZkHelpers;
import com.addthis.codec.Codec;
import com.addthis.codec.CodecJSON;
import com.addthis.hydra.job.backup.ScheduledBackupType;
import com.addthis.hydra.job.mq.CommandTaskDelete;
import com.addthis.hydra.job.mq.CommandTaskKick;
import com.addthis.hydra.job.mq.CommandTaskReplicate;
import com.addthis.hydra.job.mq.CommandTaskRevert;
import com.addthis.hydra.job.mq.CommandTaskStop;
import com.addthis.hydra.job.mq.CoreMessage;
import com.addthis.hydra.job.mq.HostCapacity;
import com.addthis.hydra.job.mq.HostMessage;
import com.addthis.hydra.job.mq.HostState;
import com.addthis.hydra.job.mq.JobKey;
import com.addthis.hydra.job.mq.ReplicaTarget;
import com.addthis.hydra.job.mq.StatusTaskBackup;
import com.addthis.hydra.job.mq.StatusTaskBegin;
import com.addthis.hydra.job.mq.StatusTaskCantBegin;
import com.addthis.hydra.job.mq.StatusTaskEnd;
import com.addthis.hydra.job.mq.StatusTaskPort;
import com.addthis.hydra.job.mq.StatusTaskReplica;
import com.addthis.hydra.job.mq.StatusTaskReplicate;
import com.addthis.hydra.job.mq.StatusTaskRevert;
import com.addthis.hydra.job.spawn.JobAlert;
import com.addthis.hydra.job.spawn.JobAlertRunner;
import com.addthis.hydra.job.spawn.SpawnService;
import com.addthis.hydra.job.store.DataStoreUtil;
import com.addthis.hydra.job.store.JobStore;
import com.addthis.hydra.job.store.SpawnDataStore;
import com.addthis.hydra.query.AliasBiMap;
import com.addthis.hydra.query.WebSocketManager;
import com.addthis.hydra.task.run.TaskExitState;
import com.addthis.hydra.util.DirectedGraph;
import com.addthis.hydra.util.SettableGauge;
import com.addthis.maljson.JSONArray;
import com.addthis.maljson.JSONException;
import com.addthis.maljson.JSONObject;
import com.addthis.meshy.MeshyClient;
import com.addthis.meshy.service.file.FileReference;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.Gauge;
import com.yammer.metrics.core.Meter;
import org.I0Itec.zkclient.ZkClient;
import org.codehaus.jackson.map.ObjectMapper;
import org.eclipse.jetty.server.Server;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.MINION_DEAD_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.MINION_UP_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_BALANCE_PARAM_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_ALERT_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_COMMAND_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_COMMON_MACRO_PATH;
import static com.addthis.hydra.job.store.SpawnDataStoreKeys.SPAWN_QUEUE_PATH;
import jsr166e.ConcurrentHashMapV8;
/**
 * manages minions running on remote nodes. runs master http server to
* communicate with and control those instances.
*/
public class Spawn implements Codec.Codable {
private static Logger log = LoggerFactory.getLogger(Spawn.class);
private static String httpHost = Parameter.value("spawn.localhost");
private static String clusterName = Parameter.value("cluster.name", "localhost");
private static String queryHttpHost = Parameter.value("spawn.queryhost");
private static int webPort = Parameter.intValue("spawn.http.port", 5050);
private static int requestHeaderBufferSize = Parameter.intValue("spawn.http.bufsize", 8192);
private static int hostStatusRequestInterval = Parameter.intValue("spawn.status.interval", 10000);
private static int queueKickInterval = Parameter.intValue("spawn.queue.kick.interval", 6000);
private static String debugOverride = Parameter.value("spawn.debug");
private static final boolean useStructuredLogger = Parameter.boolValue("spawn.logger.bundle.enable",
clusterName.equals("localhost")); // default to true if-and-only-if we are running local stack
private static final Codec codec = new CodecJSON();
private static final Counter quiesceCount = Metrics.newCounter(Spawn.class, "quiesced");
private static final SettableGauge<Integer> runningTaskCount = SettableGauge.newSettableGauge(Spawn.class, "runningTasks", 0);
private static final SettableGauge<Integer> queuedTaskCount = SettableGauge.newSettableGauge(Spawn.class, "queuedTasks", 0);
private static final SettableGauge<Integer> failTaskCount = SettableGauge.newSettableGauge(Spawn.class, "failedTasks", 0);
private static final Meter tasksStartedPerHour = Metrics.newMeter(Spawn.class, "tasksStartedPerHour", "tasksStartedPerHour", TimeUnit.HOURS);
private static final Meter tasksCompletedPerHour = Metrics.newMeter(Spawn.class, "tasksCompletedPerHour", "tasksCompletedPerHour", TimeUnit.HOURS);
private static final SettableGauge<Integer> runningJobCount = SettableGauge.newSettableGauge(Spawn.class, "runningJobs", 0);
private static final SettableGauge<Integer> queuedJobCount = SettableGauge.newSettableGauge(Spawn.class, "queuedJobs", 0);
private static final SettableGauge<Integer> failJobCount = SettableGauge.newSettableGauge(Spawn.class, "failedJobs", 0);
private static final SettableGauge<Integer> hungJobCount = SettableGauge.newSettableGauge(Spawn.class, "hungJobs", 0);
private static final Meter jobsStartedPerHour = Metrics.newMeter(Spawn.class, "jobsStartedPerHour", "jobsStartedPerHour", TimeUnit.HOURS);
private static final Meter jobsCompletedPerHour = Metrics.newMeter(Spawn.class, "jobsCompletedPerHour", "jobsCompletedPerHour", TimeUnit.HOURS);
public static final String SPAWN_DATA_DIR = Parameter.value("SPAWN_DATA_DIR", "./data");
public static final String SPAWN_STRUCTURED_LOG_DIR = Parameter.value("spawn.logger.bundle.dir", "./log/spawn-stats");
// thread pool for running chore actions that we do not want running in the main thread of Spawn
private final ExecutorService choreExecutor = MoreExecutors.getExitingExecutorService(
new ThreadPoolExecutor(1, 4, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(), new ThreadFactoryBuilder().setNameFormat("choreExecutor-%d").build()));
// thread pool for expanding jobs and sending kick messages (outside of the main application threads)
// - thread pool size of 10 chosen somewhat arbitrarily, most job expansions should be nearly instantaneous
// - max queue size of 5000 was chosen as a generous upper bound for how many tasks may be queued at once (since the number of scheduled kicks is limited by queue size)
private final LinkedBlockingQueue<Runnable> expandKickQueue = new LinkedBlockingQueue<>(5000);
private final ExecutorService expandKickExecutor = MoreExecutors.getExitingExecutorService(
new ThreadPoolExecutor(10, 10, 0L, TimeUnit.MILLISECONDS, expandKickQueue,
new ThreadFactoryBuilder().setNameFormat("jobExpander-%d").build()));
private final ScheduledExecutorService scheduledExecutor = MoreExecutors.getExitingScheduledExecutorService(
new ScheduledThreadPoolExecutor(2, new ThreadFactoryBuilder().setNameFormat("spawnScheduledTask-%d").build()));
private final Gauge<Integer> expandQueueGauge = Metrics.newGauge(Spawn.class, "expandKickExecutorQueue", new Gauge<Integer>() {
public Integer value() {
return expandKickQueue.size();
}
});
private final HostFailWorker hostFailWorker;
public static void main(String args[]) throws Exception {
Spawn spawn = new Spawn(
new File(args.length > 0 ? args[0] : "etc"),
new File(args.length > 1 ? args[1] : "web")
);
new SpawnService(spawn).start();
}
private final File dataDir;
private final ConcurrentHashMap<String, ClientEventListener> listeners;
@Codec.Set(codable = true)
private String uuid;
@Codec.Set(codable = true)
private String debug;
@Codec.Set(codable = true)
private String queryHost;
@Codec.Set(codable = true)
private String spawnHost;
@Codec.Set(codable = true)
private int queryPort = 2222;
@Codec.Set(codable = true)
private boolean quiesce;
@Codec.Set(codable = true)
private final HashSet<String> disabledHosts = new HashSet<>();
private int choreCleanerInterval = Parameter.intValue("spawn.chore.interval", 10000);
private static final int CHORE_TTL = Parameter.intValue("spawn.chore.ttl", 60 * 60 * 24 * 1000);
private static final int TASK_QUEUE_DRAIN_INTERVAL = Parameter.intValue("task.queue.drain.interval", 500);
private static final boolean ENABLE_JOB_STORE = Parameter.boolValue("job.store.enable", true);
private static final boolean ENABLE_JOB_FIXDIRS_ONCOMPLETE = Parameter.boolValue("job.fixdirs.oncomplete", true);
private final ConcurrentHashMap<String, HostState> monitored;
private final SpawnState spawnState = new SpawnState();
private final SpawnMesh spawnMesh;
private final SpawnFormattedLogger spawnFormattedLogger;
private ZkClient zkClient;
private SpawnMQ spawnMQ;
private Server jetty;
private JobConfigManager jobConfigManager;
private SetMembershipListener minionMembers;
private SetMembershipListener deadMinionMembers;
private AliasBiMap aliasBiMap;
private boolean useZk = true;
private final String stateFilePath = Parameter.value("spawn.state.file", "spawn.state");
private Gauge<Integer> minionsDown = Metrics.newGauge(Spawn.class, "minionsDown", new Gauge<Integer>() {
public Integer value() {
int total = 0;
if (monitored != null) {
synchronized (monitored) {
total = monitored.size();
}
}
int up = minionMembers == null ? 0 : minionMembers.getMemberSetSize();
int dead = deadMinionMembers == null ? 0 : deadMinionMembers.getMemberSetSize();
return total - up - dead;
}
});
private SpawnBalancer balancer;
private SpawnQueuesByPriority taskQueuesByPriority = new SpawnQueuesByPriority();
private volatile int lastQueueSize = 0;
private final Lock jobLock = new ReentrantLock();
private final AtomicBoolean shuttingDown = new AtomicBoolean(false);
private final LinkedBlockingQueue<String> jobUpdateQueue = new LinkedBlockingQueue<>();
private final SpawnJobFixer spawnJobFixer = new SpawnJobFixer(this);
private JobAlertRunner jobAlertRunner;
private JobStore jobStore;
private SpawnDataStore spawnDataStore;
//To track web socket connections
private final WebSocketManager webSocketManager = new WebSocketManager();
/**
* default constructor used for testing purposes only
*/
@VisibleForTesting
public Spawn() throws Exception {
this(false);
}
@VisibleForTesting
public Spawn(boolean zk) throws Exception {
this.dataDir = Files.initDirectory(SPAWN_DATA_DIR);
this.listeners = new ConcurrentHashMap<>();
this.monitored = new ConcurrentHashMap<>();
this.useZk = zk;
this.spawnFormattedLogger = useStructuredLogger ?
SpawnFormattedLogger.createFileBasedLogger(new File(SPAWN_STRUCTURED_LOG_DIR)) :
SpawnFormattedLogger.createNullLogger();
if (zk) {
log.info("[init] starting zkclient, config manager, and listening for minions");
this.zkClient = ZkClientFactory.makeStandardClient();
this.spawnDataStore = DataStoreUtil.makeSpawnDataStore(zkClient);
this.jobConfigManager = new JobConfigManager(this.spawnDataStore);
this.minionMembers = new SetMembershipListener(MINION_UP_PATH, true);
this.deadMinionMembers = new SetMembershipListener(MINION_DEAD_PATH, false);
}
this.hostFailWorker = new HostFailWorker(this);
this.balancer = new SpawnBalancer(this);
this.spawnMesh = new SpawnMesh(this);
}
private Spawn(File dataDir, File webDir) throws Exception {
getSettings().setQuiesced(quiesce);
this.dataDir = Files.initDirectory(dataDir);
this.monitored = new ConcurrentHashMap<>();
this.listeners = new ConcurrentHashMap<>();
this.spawnFormattedLogger = useStructuredLogger ?
SpawnFormattedLogger.createFileBasedLogger(new File(SPAWN_STRUCTURED_LOG_DIR)) :
SpawnFormattedLogger.createNullLogger();
this.zkClient = ZkClientFactory.makeStandardClient();
this.spawnDataStore = DataStoreUtil.makeSpawnDataStore(zkClient);
File statefile = new File(dataDir, stateFilePath);
if (statefile.exists() && statefile.isFile()) {
codec.decode(this, Files.read(statefile));
}
this.queryHost = (queryHttpHost != null ? queryHttpHost : InetAddress.getLocalHost().getHostAddress()) + ":" + queryPort;
this.spawnHost = (httpHost != null ? httpHost : InetAddress.getLocalHost().getHostAddress()) + ":" + webPort;
if (uuid == null) {
uuid = UUID.randomUUID().toString();
log.warn("[init] uuid was null, creating new one: " + uuid);
}
if (debugOverride != null) {
debug = debugOverride;
}
// look for local object to import
log.info("[init] beginning to load stats from data store");
loadMacros();
loadCommands();
loadSpawnQueue();
this.jobConfigManager = new JobConfigManager(spawnDataStore);
// fix up null pointers
for (Job job : spawnState.jobs.values()) {
if (job.getSubmitTime() == null) {
job.setSubmitTime(System.currentTimeMillis());
}
}
loadJobs();
// register jvm shutdown hook to clean up resources
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
runtimeShutdownHook();
}
});
// connect to message broker or fail
log.info("[init] connecting to message queue");
this.spawnMQ = new SpawnMQImpl(zkClient, this);
this.minionMembers = new SetMembershipListener(MINION_UP_PATH, true);
this.deadMinionMembers = new SetMembershipListener(MINION_DEAD_PATH, false);
this.aliasBiMap = new AliasBiMap(spawnDataStore);
aliasBiMap.loadCurrentValues();
hostFailWorker = new HostFailWorker(this);
balancer = new SpawnBalancer(this);
loadSpawnBalancerConfig();
this.spawnMQ.connectToMQ(uuid);
// request hosts to send their status
Timer timer = new Timer(true);
timer.schedule(new TimerTask() {
@Override
public void run() {
requestHostsUpdate();
}
}, hostStatusRequestInterval, hostStatusRequestInterval);
Timer taskQueueTimer = new Timer(true);
taskQueueTimer.schedule(new TimerTask() {
@Override
public void run() {
kickJobsOnQueue();
writeSpawnQueue();
}
}, queueKickInterval, queueKickInterval);
Timer taskUpdateQueueDrainer = new Timer(true);
taskUpdateQueueDrainer.schedule(new TimerTask() {
@Override
public void run() {
drainJobTaskUpdateQueue();
}
}, TASK_QUEUE_DRAIN_INTERVAL, TASK_QUEUE_DRAIN_INTERVAL);
//Start JobAlertManager
this.jobAlertRunner = new JobAlertRunner(this);
// start job scheduler
scheduledExecutor.scheduleWithFixedDelay(new UpdateEventRunnable(), 0, 1, TimeUnit.MINUTES);
scheduledExecutor.scheduleWithFixedDelay(new JobRekickTask(), 0, 500, TimeUnit.MILLISECONDS);
// start http commands listener(s)
startSpawnWeb(dataDir, webDir);
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
try {
jetty.stop();
} catch (Exception e) {
e.printStackTrace();
}
}
});
// connect to mesh
this.spawnMesh = new SpawnMesh(this);
balancer.startAutobalanceTask();
balancer.startTaskSizePolling();
if (ENABLE_JOB_STORE) {
jobStore = new JobStore(new File(dataDir, "jobstore"));
}
}
private void writeState() {
try {
File statefile = new File(dataDir, stateFilePath);
Files.write(statefile, codec.encode(this), false);
} catch (Exception e) {
log.warn("WARNING: failed to write spawn state to log file at " + stateFilePath);
}
}
public void markHostsForFailure(String hostId, boolean fileSystemDead) {
hostFailWorker.markHostsToFail(hostId, fileSystemDead);
}
public void unmarkHostsForFailure(String hostIds) {
hostFailWorker.removeHostsForFailure(hostIds);
}
public HostFailWorker getHostFailWorker() {
return hostFailWorker;
}
public SpawnBalancer getSpawnBalancer() {
return balancer;
}
public static String getHttpHost() {
return httpHost;
}
public void acquireJobLock() {
jobLock.lock();
}
public void releaseJobLock() {
jobLock.unlock();
}
private void startSpawnWeb(File dataDir, File webDir) throws Exception {
log.info("[init] starting http server");
SpawnHttp http = new SpawnHttp(this, webDir);
new SpawnManager().register(http);
jetty = new Server(webPort);
jetty.getConnectors()[0].setRequestBufferSize(65535);
jetty.getConnectors()[0].setRequestHeaderSize(requestHeaderBufferSize);
jetty.setHandler(http);
jetty.start();
}
public String getUuid() {
return uuid;
}
public MeshyClient getMeshyClient() {
return spawnMesh.getClient();
}
public ZkClient getZkClient() {
return zkClient;
}
private void closeZkClients() {
if (spawnDataStore != null) {
spawnDataStore.close();
}
if (zkClient != null) {
zkClient.close();
}
}
public void setSpawnMQ(SpawnMQ spawnMQ) {
this.spawnMQ = spawnMQ;
}
private void loadMacros() throws Exception {
Map<String, String> loadedMacros = spawnDataStore.getAllChildren(SPAWN_COMMON_MACRO_PATH);
if (loadedMacros == null) {
return;
}
for (Entry<String, String> macroEntry : loadedMacros.entrySet()) {
String jsonMacro = macroEntry.getValue();
if (jsonMacro != null && !jsonMacro.equals("null") && !jsonMacro.isEmpty()) {
JobMacro macro = new JobMacro();
codec.decode(macro, jsonMacro.getBytes());
putMacro(macroEntry.getKey(), macro, false);
}
}
}
// TODO: It should be possible to reduce duplication between how commands and macros are handled.
@VisibleForTesting
protected void loadCommands() throws Exception {
Map<String, String> loadedCommands = spawnDataStore.getAllChildren(SPAWN_COMMON_COMMAND_PATH);
if (loadedCommands == null) {
return;
}
for (Entry<String, String> commandEntry : loadedCommands.entrySet()) {
String jsonCommand = commandEntry.getValue();
if (jsonCommand != null && !jsonCommand.equals("null") && !jsonCommand.isEmpty()) {
JobCommand command = new JobCommand();
codec.decode(command, jsonCommand.getBytes());
putCommand(commandEntry.getKey(), command, false);
}
}
}
@VisibleForTesting
protected void loadSpawnQueue() throws Exception {
String queueFromZk = spawnDataStore.get(SPAWN_QUEUE_PATH);
if (queueFromZk == null) {
return;
}
try {
taskQueuesByPriority = new ObjectMapper().readValue(queueFromZk, SpawnQueuesByPriority.class);
} catch (Exception ex) {
log.warn("[task.queue] exception during spawn queue deserialization: ", ex);
}
}
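    /**
     * Serialize the current task queues to JSON and persist them under SPAWN_QUEUE_PATH;
     * loadSpawnQueue() restores them on startup.
     */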
protected void writeSpawnQueue() {
ObjectMapper om = new ObjectMapper();
try {
taskQueuesByPriority.lock();
try {
spawnDataStore.put(SPAWN_QUEUE_PATH, new String(om.writeValueAsBytes(taskQueuesByPriority)));
} finally {
taskQueuesByPriority.unlock();
}
} catch (Exception ex) {
log.warn("[task.queue] exception during spawn queue serialization: " + ex, ex);
}
}
@VisibleForTesting
protected void loadJobs() {
if (jobConfigManager != null) {
jobLock.lock();
try {
for (IJob iJob : jobConfigManager.getJobs().values()) {
if (iJob != null) {
putJobInSpawnState(new Job(iJob));
}
}
} finally {
jobLock.unlock();
}
}
Thread loadDependencies = new Thread() {
@Override
public void run() {
Set<String> jobIds = spawnState.jobs.keySet();
for (String jobId : jobIds) {
IJob job = getJob(jobId);
if (job != null) {
updateJobDependencies(job);
}
}
}
};
loadDependencies.setDaemon(true);
loadDependencies.start();
}
// -------------------- BEGIN API ---------------------
public Settings getSettings() {
return new Settings();
}
public Map<String, List<String>> getAliases() {
return aliasBiMap.viewAliasMap();
}
public void addAlias(String alias, List<String> jobs) {
if (jobs.size() > 0) {
aliasBiMap.putAlias(alias, jobs);
} else {
log.warn("Ignoring empty jobs addition for alias: " + alias);
}
}
public void deleteAlias(String alias) {
aliasBiMap.deleteAlias(alias);
}
public ClientEventListener getClientEventListener(String id) {
ClientEventListener listener = listeners.get(id);
if (listener == null) {
listener = new ClientEventListener();
listeners.put(id, listener);
}
listener.lastSeen = System.currentTimeMillis();
return listener;
}
public HostState getHostState(String hostUuid) {
synchronized (monitored) {
return monitored.get(hostUuid);
}
}
public HostState markHostStateDead(String hostUUID) {
HostState state = getHostState(hostUUID);
if (state != null) {
state.setDead(true);
state.setUpdated();
if (useZk) {
// delete minion state
ZkHelpers.deletePath(zkClient, Minion.MINION_ZK_PATH + hostUUID);
ZkHelpers.makeSurePersistentPathExists(zkClient, MINION_DEAD_PATH + "/" + hostUUID);
}
sendHostUpdateEvent(state);
updateHostState(state);
}
return state;
}
protected void updateHostState(HostState state) {
synchronized (monitored) {
if (deadMinionMembers == null || !deadMinionMembers.getMemberSet().contains(state.getHostUuid())) {
if (log.isDebugEnabled()) {
log.debug("Updating host state for : " + state.getHost());
}
monitored.put(state.getHostUuid(), state);
}
}
}
/**
* List all hosts belonging to a particular minion type.
*
* @param minionType The minion type to find. If null, return all hosts.
* @return A list of hoststates
*/
public List<HostState> listHostStatus(String minionType) {
synchronized (monitored) {
Set<String> availableMinions = minionMembers == null ? ImmutableSet.<String>of() : minionMembers.getMemberSet();
Set<String> deadMinions = deadMinionMembers == null ? ImmutableSet.<String>of() : deadMinionMembers.getMemberSet();
ArrayList<HostState> allMinions = new ArrayList<>();
for (HostState minion : monitored.values()) {
if (availableMinions.contains(minion.getHostUuid()) && !deadMinions.contains(minion.getHostUuid())) {
minion.setUp(true);
} else {
minion.setUp(false);
}
if (minionType == null || minion.hasType(minionType)) {
allMinions.add(minion);
}
}
return allMinions;
}
}
public Set<String> listMinionTypes() {
Set<String> rv = new HashSet<>();
synchronized (monitored) {
for (HostState minion : monitored.values()) {
rv.add(minion.getMinionTypes());
}
}
return rv;
}
public Collection<String> listAvailableHostIds() {
return minionMembers.getMemberSet();
}
public void requestHostsUpdate() {
try {
spawnMQ.sendControlMessage(new HostState(HostMessage.ALL_HOSTS));
} catch (Exception e) {
log.warn("unable to request host state update: " + e);
}
}
public Set<String> getDataSources(String jobId) {
HashSet<String> dataSources = new HashSet<>();
Job job = this.getJob(jobId);
if (job == null || job.getParameters() == null) {
return dataSources;
}
jobLock.lock();
try {
for (JobParameter param : job.getParameters()) {
String value = param.getValue();
if (Strings.isEmpty(value)) {
value = param.getDefaultValue();
}
if (value != null && spawnState.jobs.containsKey(value)) {
dataSources.add(value);
}
}
} finally {
jobLock.unlock();
}
return dataSources;
}
public DirectedGraph<String> getJobDependencies() {
return spawnState.jobDependencies;
}
    /**
     * Returns the jobs that depend on a given job. A dependency is established if the job's ID is used as a job parameter.
     */
public Collection<Job> listDependentJobs(String jobId) {
ArrayList<Job> dependents = new ArrayList<>();
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
for (JobParameter param : job.getParameters()) {
if (param.getValue() != null && param.getValue().equals(jobId)) {
dependents.add(job);
break;
}
}
}
return dependents;
} finally {
jobLock.unlock();
}
}
public void buildDependencyFlowGraph(FlowGraph graph, String jobId) {
graph.addFlow(jobId);
Collection<Job> jobDeps = this.listDependentJobs(jobId);
for (Job jobDep : jobDeps) {
graph.addFlow(jobId, jobDep.getId());
buildDependencyFlowGraph(graph, jobDep.getId());
}
}
/**
* Gets the backup times for a given job and node of all backup types by using MeshyClient. If the nodeId is -1 it will
* get the backup times for all nodes.
*
* @return Set of date time mapped by backup type in reverse chronological order
* @throws IOException thrown if mesh client times out, ParseException thrown if filename does not meet valid format
*/
public Map<ScheduledBackupType, SortedSet<Long>> getJobBackups(String jobUUID, int nodeId) throws IOException, ParseException {
Map<ScheduledBackupType, SortedSet<Long>> fileDates = new HashMap<ScheduledBackupType, SortedSet<Long>>();
for (ScheduledBackupType backupType : ScheduledBackupType.getBackupTypes().values()) { //ignore types with symlink (like gold)
//if(backupType.getSymlinkName()==null)
//{
final String typePrefix = "*/" + jobUUID + "/" + ((nodeId < 0) ? "*" : Integer.toString(nodeId)) + "/" + backupType.getPrefix() + "*";
List<FileReference> files = new ArrayList<FileReference>(spawnMesh.getClient().listFiles(new String[]{typePrefix}));//meshyClient.listFiles(new String[] {typePrefix}));
fileDates.put(backupType, new TreeSet<Long>(Collections.reverseOrder()));
for (FileReference file : files) {
String filename = file.name.split("/")[4];
fileDates.get(backupType).add(backupType.parseDateFromName(filename).getTime());
}
//}
}
return fileDates;
}
public boolean isSpawnMeshAvailable() {
return spawnMesh.getClient() != null;
}
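    /**
     * Remove a host from the set of monitored minions and broadcast a host.delete event;
     * logs a warning if the host was not found.
     */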
public void deleteHost(String hostuuid) {
synchronized (monitored) {
HostState state = monitored.remove(hostuuid);
if (state != null) {
log.info("Deleted host " + hostuuid);
sendHostUpdateEvent("host.delete", state);
} else {
log.warn("Attempted to delete host " + hostuuid + "But it was not found");
}
}
}
public Collection<Job> listJobs() {
ArrayList<Job> clones = new ArrayList<>(spawnState.jobs.size());
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
clones.add(job);
}
return clones;
} finally {
jobLock.unlock();
}
}
public Collection<Job> listJobsConcurrentImmutable() {
return Collections.unmodifiableCollection(spawnState.jobs.values());
}
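    /**
     * Build a JSON view of the queued tasks: one object per priority, mapping each job UUID to its
     * queued task IDs and the hosts (live plus replicas) they are assigned to.
     */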
public JSONArray getTaskQueueAsJSONArray() {
taskQueuesByPriority.lock();
try {
JSONArray jsonArray = new JSONArray();
for (Integer priority : taskQueuesByPriority.keySet()) {
Map<String, Object> jobToTaskMap = new HashMap<>();
LinkedList<SpawnQueueItem> jobQueue = taskQueuesByPriority.get(priority);
for (JobKey jobkey : jobQueue) {
JobTask jobtask = getTask(jobkey.getJobUuid(), jobkey.getNodeNumber());
String hostStr = "";
hostStr += jobtask.getHostUUID() + " ";
for (JobTaskReplica jobTaskReplica : jobtask.getReplicas()) {
hostStr += jobTaskReplica.getHostUUID() + " ";
}
HashMap<String, Object> taskHostMap = (HashMap<String, Object>) jobToTaskMap.get(jobkey.getJobUuid());
if (taskHostMap == null) {
taskHostMap = new HashMap<>();
}
taskHostMap.put(Integer.toString(jobtask.getTaskID()), hostStr);
jobToTaskMap.put(jobkey.getJobUuid(), taskHostMap);
}
JSONObject jobResult = new JSONObject(jobToTaskMap);
jsonArray.put(jobResult);
}
return jsonArray;
} finally {
taskQueuesByPriority.unlock();
}
}
public int getTaskQueuedCount() {
return lastQueueSize;
}
public Job getJob(String jobUUID) {
if (jobUUID == null) {
return null;
}
jobLock.lock();
try {
return spawnState.jobs.get(jobUUID);
} finally {
jobLock.unlock();
}
}
public void setJobConfig(String jobUUID, String config) throws Exception {
jobConfigManager.setConfig(jobUUID, config);
}
public String getJobConfig(String jobUUID) {
if (jobUUID == null) {
return null;
}
jobLock.lock();
try {
return jobConfigManager.getConfig(jobUUID);
} finally {
jobLock.unlock();
}
}
public Job putJobInSpawnState(Job job) {
if (job == null) {
return null;
}
// Null out the job config before inserting to reduce the amount stored in memory.
// Calling getJob will fill it back in -- or call jobConfigManager.getConfig(id)
job.setConfig(null);
return spawnState.jobs.put(job.getId(), job);
}
public Job getJob(JobKey jobKey) {
String jobUUID = jobKey.getJobUuid();
return getJob(jobUUID);
}
public JSONArray getJobHistory(String jobId) {
return jobStore != null ? jobStore.getHistory(jobId) : new JSONArray();
}
public String getJobHistoricalConfig(String jobId, String commitId) {
return jobStore != null ? jobStore.fetchHistoricalConfig(jobId, commitId) : null;
}
public String diff(String jobId, String commitId) {
return jobStore != null ? jobStore.getDiff(jobId, commitId) : null;
}
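    /**
     * Create a new job with default settings (one replica and one backup of each type), assign its
     * tasks to hosts of the requested minion type, and persist it.
     *
     * @param creator    The user creating the job, or null for "anonymous"
     * @param taskCount  The number of tasks to allocate
     * @param taskHosts  Hosts to consider for task placement
     * @param minionType The minion type the job should run on
     * @param command    The command the job will run
     * @return The newly created job
     * @throws Exception if no suitable host can be found for a task
     */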
public Job createJob(String creator, int taskCount, Collection<String> taskHosts, String minionType, String command) throws Exception {
jobLock.lock();
try {
Job job = new Job(UUID.randomUUID().toString(), creator != null ? creator : "anonymous");
job.setOwner(job.getCreator());
job.setState(JobState.IDLE);
job.setCommand(command);
job.setDailyBackups(1);
job.setWeeklyBackups(1);
job.setMonthlyBackups(1);
job.setHourlyBackups(1);
job.setReplicas(1);
job.setMinionType(minionType);
List<HostState> hostStates = getOrCreateHostStateList(minionType, taskHosts);
List<JobTask> tasksAssignedToHosts = balancer.generateAssignedTasksForNewJob(job.getId(), taskCount, hostStates);
job.setTasks(tasksAssignedToHosts);
for (JobTask task : tasksAssignedToHosts) {
HostState host = getHostState(task.getHostUUID());
if (host == null) {
throw new Exception("Unable to allocate job tasks because no suitable host was found");
}
host.addJob(job.getId());
}
putJobInSpawnState(job);
if (jobConfigManager != null) {
jobConfigManager.addJob(job);
}
submitConfigUpdate(job.getId(), null);
return job;
} finally {
jobLock.unlock();
}
}
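    /**
     * Compare Spawn's record of where a job's tasks live against the host state reported by the
     * minions, promoting replicas or logging discrepancies as needed. Pass "ALL" to synchronize
     * every job.
     *
     * @param jobUUID The job to synchronize, or "ALL" for all jobs
     * @return true if synchronization completed without failure
     */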
public boolean synchronizeJobState(String jobUUID) {
if (jobUUID == null) {
throw new NullPointerException("missing job uuid");
}
if (jobUUID.equals("ALL")) {
Collection<Job> jobList = listJobs();
for (Job job : jobList) {
if (!synchronizeSingleJob(job.getId())) {
log.warn("Stopping synchronize all jobs to to failure synchronizing job: " + job.getId());
return false;
}
}
return true;
} else {
return synchronizeSingleJob(jobUUID);
}
}
private boolean synchronizeSingleJob(String jobUUID) {
Job job = getJob(jobUUID);
if (job == null) {
log.warn("[job.synchronize] job uuid " + jobUUID + " not found");
return false;
}
ObjectMapper mapper = new ObjectMapper();
for (JobTask task : job.getCopyOfTasks()) {
String taskHost = task.getHostUUID();
if (deadMinionMembers.getMemberSet().contains(taskHost)) {
log.warn("task is currently assigned to a dead minion, need to check job: " + job.getId() + " host/node:" + task.getHostUUID() + "/" + task.getTaskID());
continue;
}
String hostStateString = ZkHelpers.readData(zkClient, Minion.MINION_ZK_PATH + taskHost);
HostState hostState;
try {
hostState = mapper.readValue(hostStateString, HostState.class);
} catch (IOException e) {
log.warn("Unable to deserialize host state for host: " + hostStateString + " serialized string was\n" + hostStateString);
return false;
}
boolean matched = matchJobNodeAndId(jobUUID, task, hostState.getRunning(), hostState.getStopped(), hostState.getQueued());
if (!matched) {
log.warn("Spawn thinks job: " + jobUUID + " node:" + task.getTaskID() + " is running on host: " + hostState.getHost() + " but that host disagrees.");
if (matchJobNodeAndId(jobUUID, task, hostState.getReplicas())) {
log.warn("Host: " + hostState.getHost() + " has a replica for the task/node: " + jobUUID + "/" + task.getTaskID() + " promoting replica");
try {
rebalanceReplicas(job);
} catch (Exception e) {
log.warn("Exception promoting replica during job synchronization on host: " + taskHost + " job/node" + job.getId() + "/" + job.getId());
}
} else {
log.warn("Host: " + hostState.getHost() + " does NOT have a replica for the task/node: " + jobUUID + "/" + task.getTaskID());
}
} else {
log.warn("Spawn and minion agree, job/node: " + jobUUID + "/" + task.getTaskID() + " is on host: " + hostState.getHost());
}
}
return true;
}
private boolean matchJobNodeAndId(String jobUUID, JobTask task, JobKey[]... jobKeys) {
for (JobKey[] jobKeyArray : jobKeys) {
for (JobKey jobKey : jobKeyArray) {
if (jobKey == null) {
log.warn("runningJob was null, this shouldn't happen");
continue;
} else if (jobKey.getJobUuid() == null) {
log.warn("JobUUID for jobKey: " + jobKey + " was null");
continue;
} else if (jobKey.getNodeNumber() == null) {
log.warn("NodeNumber for jobKey: " + jobKey + " was null");
continue;
}
if (jobKey.getJobUuid().equals(jobUUID) && jobKey.getNodeNumber().equals(task.getTaskID())) {
return true;
}
}
}
return false;
}
public List<HostState> getLiveHostsByReadOnlyStatus(String minionType, boolean readonly) {
List<HostState> allHosts = listHostStatus(minionType);
List<HostState> rv = new ArrayList<>(allHosts.size());
for (HostState host : allHosts) {
if (host.isUp() && !host.isDead() && host.isReadOnly() == readonly) {
rv.add(host);
}
}
return rv;
}
/**
* Reallocate some of a job's tasks to different hosts, hopefully improving its performance.
*
* @param jobUUID The ID of the job
* @param tasksToMove The number of tasks to move. If <= 0, use the default.
     * @param readonly    Whether to reallocate among read-only hosts only
     * @param autobalance Whether the reallocation was triggered by autobalance logic, in which case smaller limits are used.
* @return a list of move assignments that were attempted
*/
public List<JobTaskMoveAssignment> reallocateJob(String jobUUID, int tasksToMove, boolean readonly, boolean autobalance) {
Job job;
if (jobUUID == null || (job = getJob(jobUUID)) == null) {
throw new NullPointerException("invalid job uuid");
}
if (job.getState() != JobState.IDLE) {
log.warn("[job.reallocate] can't reallocate non-idle job");
return null;
}
List<JobTaskMoveAssignment> assignments = balancer.getAssignmentsForJobReallocation(job, tasksToMove, getLiveHostsByReadOnlyStatus(job.getMinionType(), readonly));
executeReallocationAssignments(assignments, false);
return assignments;
}
/**
* Promote a task to live on one of its replica hosts, demoting the existing live to a replica.
*
* @param jobUUID job ID
* @param node task #
* @param replicaHostID The host holding the replica that should be promoted
* @param kickOnComplete Whether to kick the task after the move is complete
* @param ignoreQuiesce Whether the kick can ignore quiesce (because it's a manual kick that was submitted while spawn was quiesced)
* @return true on success
*/
public boolean swapTask(String jobUUID, int node, String replicaHostID, boolean kickOnComplete, boolean ignoreQuiesce) {
JobTask task = getTask(jobUUID, node);
if (task == null) {
log.warn("[task.swap] received null task for " + jobUUID);
return false;
}
if (!checkHostStatesForSwap(task.getJobKey(), task.getHostUUID(), replicaHostID, true)) {
log.warn("[swap.task.stopped] failed; exiting");
return false;
}
Job job;
jobLock.lock();
try {
job = getJob(jobUUID);
task.replaceReplica(replicaHostID, task.getHostUUID());
task.setHostUUID(replicaHostID);
queueJobTaskUpdateEvent(job);
} finally {
jobLock.unlock();
}
if (kickOnComplete) {
try {
scheduleTask(job, task, expandJob(job));
} catch (Exception e) {
log.warn("Warning: failed to kick task " + task.getJobKey() + " with: " + e, e);
}
}
return true;
}
/**
* Get a replacement host for a new task
*
* @param job The job for the task to be reassigned
* @return A replacement host ID
*/
private String getReplacementHost(Job job) {
List<HostState> hosts = getLiveHostsByReadOnlyStatus(job.getMinionType(), false);
for (HostState host : hosts) {
if (host.canMirrorTasks()) {
return host.getHostUuid();
}
}
return hosts.get(0).getHostUuid();
}
/**
* Given a new task, replace any hosts that are down/disabled to ensure that it can kick
*
* @param task The task to modify
* @return True if at least one host was removed
*/
private boolean replaceDownHosts(JobTask task) {
Job job = getJob(task.getJobKey());
if (job == null) {
return false;
}
HostState host = getHostState(task.getHostUUID());
boolean changed = false;
if (host == null || !host.canMirrorTasks()) {
task.setHostUUID(getReplacementHost(job));
changed = true;
}
if (task.getReplicas() != null) {
List<JobTaskReplica> tempReplicas = new ArrayList<>(task.getReplicas());
for (JobTaskReplica replica : tempReplicas) {
HostState replicaHost = getHostState(replica.getHostUUID());
if (replicaHost == null || !replicaHost.canMirrorTasks()) {
changed = true;
task.setReplicas(removeReplicasForHost(replica.getHostUUID(), task.getReplicas()));
}
}
}
if (changed) {
try {
this.rebalanceReplicas(job, false);
updateJob(job);
} catch (Exception ex) {
log.warn("Failed to sent replication message for new task " + task.getJobKey() + ": " + ex, ex);
return false;
}
}
return changed;
}
/**
* Check whether it is acceptable to swap a task between two hosts
*
* @param key The task to consider swapping
* @param liveHostID The current host for the task
* @param replicaHostID The potential target host to check
     * @param checkTargetReplica Whether to require that the target host already holds a complete replica of the task
     * @return True if both hosts are up and have the appropriate task directory
*/
private boolean checkHostStatesForSwap(JobKey key, String liveHostID, String replicaHostID, boolean checkTargetReplica) {
if (key == null || liveHostID == null || replicaHostID == null) {
log.warn("[task.swap] failed due to null input");
return false;
}
JobTask task = getTask(key.getJobUuid(), key.getNodeNumber());
if (task == null) {
log.warn("[task.swap] failed: nonexistent task/replicas");
return false;
}
HostState liveHost = getHostState(liveHostID);
HostState replicaHost = getHostState(replicaHostID);
if (liveHost == null || replicaHost == null || liveHost.isDead() || !liveHost.isUp() || replicaHost.isDead() || !replicaHost.isUp()) {
log.warn("[task.swap] failed due to invalid host states for " + liveHostID + "," + replicaHostID);
return false;
}
if (checkTargetReplica && !isNewTask(task)) {
if (!replicaHost.hasLive(key)) {
log.warn("[task.swap] failed because the replica host " + replicaHostID + " does not have a complete replica of task " + key);
return false;
}
}
return true;
}
/**
* Push or pull tasks off of a host to balance its load with the rest of the cluster.
*
* @param hostUUID The ID of the host
     * @return a RebalanceOutcome summarizing the reallocation assignments that were attempted
*/
public RebalanceOutcome rebalanceHost(String hostUUID) {
if (hostUUID == null || !monitored.containsKey(hostUUID)) {
return new RebalanceOutcome(hostUUID, "missing host", null, null);
}
HostState host = monitored.get(hostUUID);
boolean readOnly = host.isReadOnly();
log.warn("[job.reallocate] starting reallocation for host: " + hostUUID + " host is " + (readOnly ? "" : "not") + " a read only host");
List<JobTaskMoveAssignment> assignments = balancer.getAssignmentsToBalanceHost(host, getLiveHostsByReadOnlyStatus(host.getMinionTypes(), host.isReadOnly()));
executeReallocationAssignments(assignments, false);
return new RebalanceOutcome(hostUUID, null, null, Strings.join(assignments.toArray(), "\n"));
}
/**
* Sanity-check a series of task move assignments coming from SpawnBalancer, then execute the sensible ones.
*
* @param assignments The assignments to execute
* @param limitToAvailableSlots Whether movements should honor their host's availableTaskSlots count
* @return The number of tasks that were actually moved
*/
public int executeReallocationAssignments(List<JobTaskMoveAssignment> assignments, boolean limitToAvailableSlots) {
int numExecuted = 0;
if (assignments == null) {
return numExecuted;
}
HashSet<String> jobsNeedingUpdate = new HashSet<>();
HashSet<String> hostsAlreadyMovingTasks = new HashSet<>();
for (JobTaskMoveAssignment assignment : assignments) {
if (assignment.delete()) {
log.warn("[job.reallocate] deleting " + assignment.getJobKey() + " off " + assignment.getSourceUUID());
deleteTask(assignment.getJobKey().getJobUuid(), assignment.getSourceUUID(), assignment.getJobKey().getNodeNumber(), false);
deleteTask(assignment.getJobKey().getJobUuid(), assignment.getSourceUUID(), assignment.getJobKey().getNodeNumber(), true);
} else {
String sourceHostID = assignment.getSourceUUID();
String targetHostID = assignment.getTargetUUID();
HostState targetHost = getHostState(targetHostID);
if (sourceHostID == null || targetHostID == null || sourceHostID.equals(targetHostID) || targetHost == null) {
log.warn("[job.reallocate] received invalid host assignment: from " + sourceHostID + " to " + targetHostID);
continue;
}
JobTask task = getTask(assignment.getJobKey());
Job job = getJob(task.getJobUUID());
if (job == null || job.getCopyOfTasks() == null || job.getCopyOfTasks().isEmpty()) {
log.warn("[job.reallocate] invalid or empty job");
continue;
}
if (assignment.promote()) {
log.warn("[job.reallocate] promoting " + task.getJobKey() + " on " + sourceHostID);
task.setHostUUID(sourceHostID);
List<JobTaskReplica> replicasToModify = targetHost.isReadOnly() ? task.getReadOnlyReplicas() : task.getReplicas();
removeReplicasForHost(sourceHostID, replicasToModify);
                    replicasToModify.add(new JobTaskReplica(targetHostID, task.getJobUUID(), task.getRunCount(), 0L));
swapTask(task.getJobUUID(), task.getTaskID(), sourceHostID, false, false);
jobsNeedingUpdate.add(task.getJobUUID());
} else {
HostState liveHost = getHostState(task.getHostUUID());
if (limitToAvailableSlots && liveHost != null && (liveHost.getAvailableTaskSlots() == 0 || hostsAlreadyMovingTasks.contains(task.getHostUUID()))) {
continue;
}
hostsAlreadyMovingTasks.add(task.getHostUUID());
JobKey key = task.getJobKey();
log.warn("[job.reallocate] replicating task " + key + " onto " + targetHostID + " as " + (assignment.isFromReplica() ? "replica" : "live"));
TaskMover tm = new TaskMover(this, key, targetHostID, sourceHostID, false);
tm.execute(false);
numExecuted++;
}
}
}
for (String jobUUID : jobsNeedingUpdate) {
try {
updateJob(getJob(jobUUID));
} catch (Exception ex) {
log.warn("WARNING: failed to update job " + jobUUID + ": " + ex, ex);
}
}
return numExecuted;
}
/**
* A method to ensure all live/replicas exist where they should, and optimize their locations if all directories are correct
*
* @param jobUUID The job id to rebalance
* @param tasksToMove The number of tasks to move. If < 0, use the default.
* @return a RebalanceOutcome describing which steps were performed
* @throws Exception If there is a failure when rebalancing replicas
*/
public RebalanceOutcome rebalanceJob(String jobUUID, int tasksToMove) throws Exception {
Job job = getJob(jobUUID);
if (jobUUID == null || job == null) {
log.warn("[job.rebalance] job uuid " + jobUUID + " not found");
return new RebalanceOutcome(jobUUID, "job not found", null, null);
}
if (job.getState() != JobState.IDLE && job.getState() != JobState.DEGRADED) {
log.warn("[job.rebalance] job must be IDLE or DEGRADED to rebalance " + jobUUID);
return new RebalanceOutcome(jobUUID, "job not idle/degraded", null, null);
}
// First, make sure each task has claimed all the replicas it should have
if (!rebalanceReplicas(job)) {
log.warn("[job.rebalance] failed to fill out replica assignments for " + jobUUID);
return new RebalanceOutcome(jobUUID, "couldn't fill out replicas", null, null);
}
try {
List<JobTaskDirectoryMatch> allMismatches = new ArrayList<>();
// Check each task to see if any live/replica directories are missing or incorrectly placed
for (JobTask task : job.getCopyOfTasks()) {
List<JobTaskDirectoryMatch> directoryMismatches = matchTaskToDirectories(task, false);
if (!directoryMismatches.isEmpty()) {
// If there are issues with a task's directories, resolve them.
resolveJobTaskDirectoryMatches(job, task, directoryMismatches, false);
allMismatches.addAll(directoryMismatches);
}
}
updateJob(job);
// If any mismatches were found, skip the optimization step
if (!allMismatches.isEmpty()) {
return new RebalanceOutcome(jobUUID, null, Strings.join(allMismatches.toArray(), "\n"), null);
} else {
// If all tasks had all expected directories, consider moving some tasks to better hosts
return new RebalanceOutcome(jobUUID, null, null, Strings.join(reallocateJob(jobUUID, tasksToMove, false, false).toArray(), "\n"));
}
} catch (Exception ex) {
log.warn("[job.rebalance] exception during rebalance for " + jobUUID, ex);
return new RebalanceOutcome(jobUUID, "exception during rebalancing: " + ex, null, null);
}
}
/**
* For a particular task, ensure all live/replica copies exist where they should
*
* @param jobId The job id to fix
* @param node The task id to fix, or -1 to fix all
* @param ignoreTaskState Whether to ignore the task's state (mostly when recovering from a host failure)
* @param orphansOnly Whether to only delete orphans for idle tasks
* @return A string description
*/
public String fixTaskDir(String jobId, int node, boolean ignoreTaskState, boolean orphansOnly) {
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return "Null job";
}
int numChanged = 0;
List<JobTask> tasks = node < 0 ? job.getCopyOfTasks() : Arrays.asList(job.getTask(node));
for (JobTask task : tasks) {
boolean shouldModifyTask = !spawnJobFixer.haveRecentlyFixedTask(task.getJobKey()) &&
(ignoreTaskState || (task.getState() == JobTaskState.IDLE || (!orphansOnly && task.getState() == JobTaskState.ERROR)));
if (log.isDebugEnabled()) {
log.debug("[fixTaskDir] considering modifying task " + task.getJobKey() + " shouldModifyTask=" + shouldModifyTask);
}
if (shouldModifyTask) {
try {
numChanged += resolveJobTaskDirectoryMatches(job, task, matchTaskToDirectories(task, false), orphansOnly) ? 1 : 0;
spawnJobFixer.markTaskRecentlyFixed(task.getJobKey());
} catch (Exception ex) {
log.warn("fixTaskDir exception " + ex, ex);
return "fixTaskDir exception (see log for more details): " + ex;
}
}
}
return "Changed " + numChanged + " tasks";
} finally {
jobLock.unlock();
}
}
public boolean resolveJobTaskDirectoryMatches(Job job, JobTask task, List<JobTaskDirectoryMatch> matches, boolean deleteOrphansOnly) throws Exception {
boolean changed = false;
for (JobTaskDirectoryMatch match : matches) {
boolean resolvedMissingLive = false;
switch (match.getType()) {
case MATCH:
continue;
case MISMATCH_MISSING_LIVE:
if (deleteOrphansOnly) {
continue;
}
changed = true;
resolveMissingLive(task);
resolvedMissingLive = true; // Only need to resolve missing live once, since all replicas will be recopied
break;
case ORPHAN_LIVE:
changed = true;
sendControlMessage(new CommandTaskDelete(match.getHostId(), job.getId(), task.getTaskID(), job.getRunCount()));
break;
default:
continue;
}
if (resolvedMissingLive) {
break;
}
}
return changed;
}
/**
* Handle the case where no living host has a copy of a task. Promote a replica if there is one, or recreate the task otherwise.
*
* @param task The task to modify.
*/
private void resolveMissingLive(JobTask task) {
HostState liveHost = getHostState(task.getHostUUID());
if (liveHost != null && liveHost.hasLive(task.getJobKey())) {
copyTaskToReplicas(task);
return;
}
boolean succeeded = false;
List<JobTaskReplica> replicas = task.getReplicas();
if (replicas != null && !replicas.isEmpty()) {
HostState host;
for (JobTaskReplica replica : replicas) {
host = replica != null ? getHostState(replica.getHostUUID()) : null;
if (host != null && host.canMirrorTasks() && !host.isReadOnly() && host.hasLive(task.getJobKey())) {
log.warn("[job.rebalance] promoting host " + host.getHostUuid() + " as live for " + task.getJobKey());
task.replaceReplica(host.getHostUuid(), task.getHostUUID());
task.setHostUUID(host.getHostUuid());
copyTaskToReplicas(task);
succeeded = true;
break;
}
}
}
if (!succeeded && getHostState(task.getHostUUID()) == null) {
// If no replica is found and the host doesn't exist, we must recreate the task somewhere else.
recreateTask(task);
}
}
private void copyTaskToReplicas(JobTask task) {
sendControlMessage(new CommandTaskReplicate(task.getHostUUID(), task.getJobUUID(), task.getTaskID(), getTaskReplicaTargets(task, task.getReplicas()), null, null, false));
}
private void recreateTask(JobTask task) {
Job job = getJob(task.getJobUUID());
Map<JobTask, String> assignmentMap = balancer.assignTasksFromMultipleJobsToHosts(Arrays.asList(task), getOrCreateHostStateList(job.getMinionType(), null));
if (assignmentMap != null && assignmentMap.containsKey(task)) {
String newHostUUID = assignmentMap.get(task);
log.warn("[job.rebalance] assigning new host for " + task.getJobUUID() + ":" + task.getTaskID() + " all data on previous host will be lost");
task.setHostUUID(newHostUUID);
task.resetTaskMetrics();
} else {
log.warn("[job.rebalance] unable to assign new host for " + task.getJobUUID() + ":" + task.getTaskID() + " could not find suitable host");
}
}
public String checkTaskDirText(String jobId, int node) {
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return "NULL JOB";
}
StringBuilder sb = new StringBuilder();
List<JobTask> tasks = node < 0 ? new ArrayList<>(job.getCopyOfTasksSorted()) : Arrays.asList(job.getTask(node));
sb.append("Directory check for job " + job.getId() + "\n");
for (JobTask task : tasks) {
sb.append("Task " + task.getTaskID() + ": " + matchTaskToDirectories(task, true) + "\n");
}
return sb.toString();
} finally {
jobLock.unlock();
}
}
public JSONArray checkTaskDirJSON(String jobId, int node) {
JSONArray resultList = new JSONArray();
jobLock.lock();
try {
Job job = getJob(jobId);
if (job == null) {
return resultList;
}
List<JobTask> tasks = node < 0 ? new ArrayList<>(job.getCopyOfTasksSorted()) : Arrays.asList(job.getTask(node));
for (JobTask task : tasks) {
List<JobTaskDirectoryMatch> taskMatches = matchTaskToDirectories(task, true);
for (JobTaskDirectoryMatch taskMatch : taskMatches) {
JSONObject jsonObject = CodecJSON.encodeJSON(taskMatch);
resultList.put(jsonObject);
}
}
} catch (Exception ex) {
log.warn("Error: checking dirs for job: " + jobId + ", node: " + node);
} finally {
jobLock.unlock();
}
return resultList;
}
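    /**
     * Compare a task's expected live and replica locations against what each host reports,
     * returning any mismatches (missing live copies, in-progress replications, orphaned copies)
     * and, if requested, the correct matches as well.
     *
     * @param task           The task to check
     * @param includeCorrect Whether to include directories that match expectations in the result
     */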
public List<JobTaskDirectoryMatch> matchTaskToDirectories(JobTask task, boolean includeCorrect) {
List<JobTaskDirectoryMatch> rv = new ArrayList<>();
JobTaskDirectoryMatch match = checkHostForTask(task, task.getHostUUID());
if (includeCorrect || match.getType() != JobTaskDirectoryMatch.MatchType.MATCH) {
rv.add(match);
}
if (task.getAllReplicas() != null) {
for (JobTaskReplica replica : task.getAllReplicas()) {
match = checkHostForTask(task, replica.getHostUUID());
if (match.getType() != JobTaskDirectoryMatch.MatchType.MATCH) {
if (task.getState() == JobTaskState.REPLICATE || task.getState() == JobTaskState.FULL_REPLICATE) {
// If task is replicating, it will temporarily look like it's missing on the target host. Make this visible to the UI.
rv.add(new JobTaskDirectoryMatch(JobTaskDirectoryMatch.MatchType.REPLICATE_IN_PROGRESS, match.getJobKey(), match.getHostId()));
} else {
rv.add(match);
}
}
else if (includeCorrect) {
rv.add(match);
}
}
}
rv.addAll(findOrphansForTask(task));
return rv;
}
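/** Check whether a single host actually has a live copy of the given task. */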
private JobTaskDirectoryMatch checkHostForTask(JobTask task, String hostID) {
JobTaskDirectoryMatch.MatchType type;
HostState host = getHostState(hostID);
if (host == null || !host.hasLive(task.getJobKey())) {
type = JobTaskDirectoryMatch.MatchType.MISMATCH_MISSING_LIVE;
} else {
type = JobTaskDirectoryMatch.MatchType.MATCH;
}
return new JobTaskDirectoryMatch(type, task.getJobKey(), hostID);
}
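/** Find up, non-dead hosts that hold a copy of the task but are not among its expected live/replica hosts. */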
private List<JobTaskDirectoryMatch> findOrphansForTask(JobTask task) {
List<JobTaskDirectoryMatch> rv = new ArrayList<>();
Job job = getJob(task.getJobUUID());
if (job == null) {
log.warn("got find orphans request for missing job " + task.getJobUUID());
return rv;
}
Set<String> expectedTaskHosts = task.getAllTaskHosts();
for (HostState host : listHostStatus(job.getMinionType())) {
if (host == null || !host.isUp() || host.isDead() || host.getHostUuid().equals(task.getRebalanceTarget())) {
continue;
}
if (!expectedTaskHosts.contains(host.getHostUuid())) {
JobTaskDirectoryMatch.MatchType type = null;
if (host.hasLive(task.getJobKey()) || host.hasIncompleteReplica(task.getJobKey())) {
type = JobTaskDirectoryMatch.MatchType.ORPHAN_LIVE;
}
if (type != null) {
rv.add(new JobTaskDirectoryMatch(type, task.getJobKey(), host.getHostUuid()));
}
}
}
return rv;
}
/**
 * This class moves a task from a source host to a target host.
 * If the target host already had a replica of the task, that
 * replica is removed so the task will make a new replica somewhere
 * else.
 */
private class TaskMover {
private final JobKey taskKey;
private final String targetHostUUID;
private final String sourceHostUUID;
private HostState targetHost;
private Job job;
private JobTask task;
private boolean kickOnComplete;
private boolean isMigration;
private final Spawn spawn;
TaskMover(Spawn spawn, JobKey taskKey, String targetHostUUID, String sourceHostUUID, boolean kickOnComplete) {
this.spawn = spawn;
this.taskKey = taskKey;
this.targetHostUUID = targetHostUUID;
this.sourceHostUUID = sourceHostUUID;
this.kickOnComplete = kickOnComplete;
}
public void setMigration(boolean isMigration) {
this.isMigration = isMigration;
}
public String choreWatcherKey() {
return targetHostUUID + "&&&" + taskKey;
}
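/** Build a replicate command targeting the destination host and send it, tagging it with this move's rebalance source/target. */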
private void startReplicate() throws Exception {
ReplicaTarget[] target = new ReplicaTarget[]{
new ReplicaTarget(
targetHostUUID,
targetHost.getHost(),
targetHost.getUser(),
targetHost.getPath(),
task.getReplicationFactor())
};
job.setSubmitCommand(getCommand(job.getCommand()));
JobCommand jobcmd = job.getSubmitCommand();
CommandTaskReplicate replicate = new CommandTaskReplicate(
task.getHostUUID(), task.getJobUUID(), task.getTaskID(), target, Strings.join(jobcmd.getCommand(), " "), choreWatcherKey(), true);
replicate.setRebalanceSource(sourceHostUUID);
replicate.setRebalanceTarget(targetHostUUID);
spawn.sendControlMessage(replicate);
log.warn("[task.mover] replicating job/task " + task.getJobKey() + " from " + sourceHostUUID + " onto host " + targetHostUUID);
}
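/**
 * Validate the move (hosts up, task exists, no existing copy on the target), mark the task as
 * rebalancing or migrating, then kick off replication to the target host.
 *
 * @param allowQueuedTasks Whether a queued task may also be moved
 * @return true if the replication was started
 */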
public boolean execute(boolean allowQueuedTasks) {
targetHost = spawn.getHostState(targetHostUUID);
if (taskKey == null || !spawn.checkStatusForMove(targetHostUUID) || !spawn.checkStatusForMove(sourceHostUUID)) {
log.warn("[task.mover] erroneous input; terminating for: " + taskKey);
return false;
}
job = spawn.getJob(taskKey);
task = job != null ? job.getTask(taskKey.getNodeNumber()) : null;
if (task == null) {
log.warn("[task.mover] failed to find job or task for: " + taskKey);
return false;
}
HostState liveHost = spawn.getHostState(task.getHostUUID());
if (liveHost == null || !liveHost.hasLive(task.getJobKey())) {
log.warn("[task.mover] failed to find live task for: " + taskKey);
return false;
}
if (!task.getHostUUID().equals(sourceHostUUID) && !task.hasReplicaOnHost(sourceHostUUID)) {
log.warn("[task.mover] failed because the task does not have a copy on the specified source: " + taskKey);
return false;
}
if (task.getAllTaskHosts().contains(targetHostUUID) || targetHost.hasLive(taskKey)) {
log.warn("[task.mover] cannot move onto a host with an existing version of task: " + taskKey);
return false;
}
if (!spawn.prepareTaskStatesForRebalance(job, task, allowQueuedTasks, isMigration)) {
log.warn("[task.mover] couldn't set task states; terminating for: " + taskKey);
return false;
}
try {
task.setRebalanceSource(sourceHostUUID);
task.setRebalanceTarget(targetHostUUID);
startReplicate();
return true;
} catch (Exception ex) {
log.warn("[task.mover] exception during replicate initiation; terminating for task: " + taskKey, ex);
task.setErrorCode(JobTaskErrorCode.EXIT_REPLICATE_FAILURE);
task.setState(JobTaskState.ERROR);
return false;
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("TaskMover");
sb.append("{taskKey=").append(taskKey);
sb.append(", targetHostUUID='").append(targetHostUUID).append('\'');
sb.append(", sourceHostUUID='").append(sourceHostUUID).append('\'');
sb.append(", job=").append(job);
sb.append(", task=").append(task);
sb.append(", kickOnComplete=").append(kickOnComplete);
sb.append('}');
return sb.toString();
}
}
public boolean checkStatusForMove(String hostID) {
HostState host = getHostState(hostID);
if (host == null) {
log.warn("[host.status] received null host for id " + hostID);
return false;
}
if (host.isDead() || !host.isUp()) {
log.warn("[host.status] host is down: " + hostID);
return false;
}
return true;
}
public boolean prepareTaskStatesForRebalance(Job job, JobTask task, boolean allowQueuedTasks, boolean isMigration) {
jobLock.lock();
try {
if (task.getState() != JobTaskState.IDLE && (!allowQueuedTasks && task.getState() != JobTaskState.QUEUED)) {
log.warn("[task.mover] decided not to move non-idle task " + task);
return false;
}
JobTaskState newState = isMigration ? JobTaskState.MIGRATING : JobTaskState.REBALANCE;
job.setTaskState(task, newState, true);
queueJobTaskUpdateEvent(job);
return true;
} finally {
jobLock.unlock();
}
}
/**
* exclude failed hosts from eligible pool
* iterate over tasks
* assemble hosts job spread across
* count replicas per host
* iterate over tasks and make reductions
* iterate over tasks and make additions
* exclude task host from replica
* assign in order of least replicas per host
* <p/>
* TODO synchronize on job
* TODO allow all cluster hosts to be considered for replicas
* TODO consider host group "rack aware" keep 1/first replica in same group
*
* @return true if rebalance was successful
*/
public boolean rebalanceReplicas(Job job) throws Exception {
// perform read/write and read only replication
return rebalanceReplicas(job, false) && rebalanceReplicas(job, true);
}
/**
* exclude failed hosts from eligible pool
* iterate over tasks
* assemble hosts job spread across
* count replicas per host
* iterate over tasks and make reductions
* iterate over tasks and make additions
* exclude task host from replica
* assign in order of least replicas per host
* <p/>
* TODO synchronize on job
* TODO allow all cluster hosts to be considered for replicas
* TODO consider host group "rack aware" keep 1/first replica in same group
*
* @param job the job to rebalance replicas
* @param taskID The task # to fill out replicas, or -1 for all tasks
* @param readOnly Whether to fill out readonly replicas or standard replicas
* @return true if rebalance was successful
*/
public boolean rebalanceReplicas(Job job, int taskID, boolean readOnly) throws Exception {
if (job == null) {
return false;
}
boolean success = true;
// Ensure that there aren't any replicas pointing towards the live host or duplicate replicas
balancer.removeInvalidReplicas(job, readOnly);
// Ask SpawnBalancer where new replicas should be sent
Map<Integer, List<String>> replicaAssignments = balancer.getAssignmentsForNewReplicas(job, taskID, readOnly);
List<JobTask> tasks = taskID >= 0 ? Arrays.asList(job.getTask(taskID)) : job.getCopyOfTasks();
for (JobTask task : tasks) {
List<String> replicasToAdd = replicaAssignments.get(task.getTaskID());
// Make the new replicas as dictated by SpawnBalancer
if (readOnly) {
task.setReadOnlyReplicas(addReplicasAndRemoveExcess(task, replicasToAdd, job.getReadOnlyReplicas(), task.getReadOnlyReplicas()));
} else {
task.setReplicas(addReplicasAndRemoveExcess(task, replicasToAdd, job.getReplicas(), task.getReplicas()));
}
}
if (!readOnly) {
success = validateReplicas(job);
}
return success;
}
public boolean rebalanceReplicas(Job job, boolean readOnly) throws Exception {
return rebalanceReplicas(job, -1, readOnly);
}
/**
* check all tasks. If there are still not enough replicas, record failure.
*
* @param job - the job to validate
* @return true if the job has met its replica requirements
*/
private boolean validateReplicas(Job job) {
for (JobTask task : job.getCopyOfTasks()) {
List<JobTaskReplica> replicas = task.getReplicas();
if (job.getReplicas() > 0) {
if (replicas == null || replicas.size() < job.getReplicas()) {
HostState currHost = getHostState(task.getHostUUID());
// If the current host is dead and there are no replicas, mark the job degraded
if ((currHost == null || currHost.isDead()) && (replicas == null || replicas.size() == 0)) {
job.setState(JobState.DEGRADED);
} else {
// Otherwise, mark the job errored so we will know that at least one replica failed
job.setState(JobState.ERROR);
job.setEnabled(false);
}
log.warn("[replica.add] ERROR - unable to replicate task because there are not enough suitable hosts, job: " + job.getId());
return false;
}
}
}
return true;
}
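/**
 * Combine the existing replica list with any newly assigned replica hosts, then, for established tasks,
 * trim replicas from the end of the list and send delete commands until the desired replica count is reached.
 */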
private List<JobTaskReplica> addReplicasAndRemoveExcess(JobTask task, List<String> replicaHostsToAdd,
int desiredNumberOfReplicas,
List<JobTaskReplica> currentReplicas) throws Exception {
List<JobTaskReplica> newReplicas = (currentReplicas == null ? new ArrayList<JobTaskReplica>() : new ArrayList<>(currentReplicas));
if (replicaHostsToAdd != null) {
newReplicas.addAll(replicateTask(task, replicaHostsToAdd));
}
if (!isNewTask(task)) {
while (newReplicas.size() > desiredNumberOfReplicas) {
JobTaskReplica replica = newReplicas.remove(newReplicas.size() - 1);
spawnMQ.sendControlMessage(new CommandTaskDelete(replica.getHostUUID(), task.getJobUUID(), task.getTaskID(), task.getRunCount()));
log.warn("[replica.delete] " + task.getJobUUID() + "/" + task.getTaskID() + " from " + replica.getHostUUID() + " @ " + getHostState(replica.getHostUUID()).getHost());
}
}
return newReplicas;
}
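/** Create replica records for each target host and send a single replicate command so the live host copies the task to them. */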
protected List<JobTaskReplica> replicateTask(JobTask task, List<String> targetHosts) {
List<JobTaskReplica> newReplicas = new ArrayList<>();
for (String targetHostUUID : targetHosts) {
JobTaskReplica replica = new JobTaskReplica();
replica.setHostUUID(targetHostUUID);
replica.setJobUUID(task.getJobUUID());
newReplicas.add(replica);
}
Job job = getJob(task.getJobUUID());
JobCommand jobcmd = job.getSubmitCommand();
String command = (jobcmd != null && jobcmd.getCommand() != null) ? Strings.join(jobcmd.getCommand(), " ") : null;
spawnMQ.sendControlMessage(new CommandTaskReplicate(task.getHostUUID(), task.getJobUUID(), task.getTaskID(), getTaskReplicaTargets(task, newReplicas), command, null, false));
log.warn("[replica.add] " + task.getJobUUID() + "/" + task.getTaskID() + " to " + targetHosts);
return newReplicas;
}
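/** Refresh the job dependency graph: drop the job's old incoming edges and re-add edges from its current data sources. */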
private void updateJobDependencies(IJob job) {
String jobId = job.getId();
DirectedGraph<String> dependencies = spawnState.jobDependencies;
Set<String> sources = dependencies.getSourceEdges(jobId);
if (sources != null) {
for (String source : sources) {
dependencies.removeEdge(source, jobId);
}
} else {
dependencies.addNode(jobId);
}
Set<String> newSources = this.getDataSources(jobId);
if (newSources != null) {
for (String source : newSources) {
dependencies.addEdge(source, jobId);
}
}
}
/**
* Submit a config update to the job store
*
* @param jobId The job to submit
* @param commitMessage If specified, the commit message to use
*/
public void submitConfigUpdate(String jobId, String commitMessage) {
Job job;
if (jobId == null || jobId.isEmpty() || (job = getJob(jobId)) == null) {
return;
}
if (jobStore != null) {
jobStore.submitConfigUpdate(job.getId(), job.getOwner(), getJobConfig(jobId), commitMessage);
}
}
public void updateJob(IJob ijob) throws Exception {
updateJob(ijob, true);
}
/**
* requires 'job' to be a different object from the one in cache. make sure
* to clone() any job fetched from cache before submitting to updateJob().
*/
public void updateJob(IJob ijob, boolean reviseReplicas) throws Exception {
if (useZk) {
Job job = new Job(ijob);
jobLock.lock();
try {
require(getJob(job.getId()) != null, "job " + job.getId() + " does not exist");
updateJobDependencies(job);
Job oldjob = putJobInSpawnState(job);
// take action on trigger changes (like # replicas)
if (oldjob != job && reviseReplicas) {
int oldReplicaCount = oldjob.getReplicas();
int newReplicaCount = job.getReplicas();
require(oldReplicaCount == newReplicaCount || job.getState() == JobState.IDLE || job.getState() == JobState.DEGRADED, "job must be IDLE or DEGRADED to change replicas");
require(newReplicaCount < monitored.size(), "replication factor must be < # live hosts");
rebalanceReplicas(job);
}
sendJobUpdateEvent(job);
} finally {
jobLock.unlock();
}
}
}
public void putAlert(String alertId, JobAlert alert) {
jobAlertRunner.putAlert(alertId, alert);
}
public void removeAlert(String alertId) {
jobAlertRunner.removeAlert(alertId);
}
public JSONArray fetchAllAlertsArray() {
return jobAlertRunner.getAlertStateArray();
}
public JSONObject fetchAllAlertsMap() {
return jobAlertRunner.getAlertStateMap();
}
public String getAlert(String alertId) {
return jobAlertRunner.getAlert(alertId);
}
public enum DeleteStatus {
SUCCESS, JOB_MISSING, JOB_DO_NOT_DELETE
}
public DeleteStatus deleteJob(String jobUUID) throws Exception {
jobLock.lock();
try {
Job job = getJob(jobUUID);
if (job == null) {
return DeleteStatus.JOB_MISSING;
}
if (job.getDontDeleteMe()) {
return DeleteStatus.JOB_DO_NOT_DELETE;
}
spawnState.jobs.remove(jobUUID);
spawnState.jobDependencies.removeNode(jobUUID);
log.warn("[job.delete] " + job.getId() + " >> " + job.getCopyOfTasks());
spawnMQ.sendControlMessage(new CommandTaskDelete(HostMessage.ALL_HOSTS, job.getId(), null, job.getRunCount()));
sendJobUpdateEvent("job.delete", job);
if (jobConfigManager != null) {
jobConfigManager.deleteJob(job.getId());
}
if (jobStore != null) {
jobStore.delete(jobUUID);
}
return DeleteStatus.SUCCESS;
} finally {
jobLock.unlock();
}
}
public void sendControlMessage(HostMessage hostMessage) {
spawnMQ.sendControlMessage(hostMessage);
}
/**
* Deletes a task from a specific host only, which is useful when there are replicas and
* a task has been migrated to another host
*
* @param jobUUID The job to delete
* @param hostUuid The host where the delete message should be sent
* @param node The specific task to be deleted
* @param isReplica Whether the task to be deleted is a replica or a live
* @return True if the task is successfully removed
*/
public boolean deleteTask(String jobUUID, String hostUuid, Integer node, boolean isReplica) {
jobLock.lock();
try {
if (jobUUID == null || node == null) {
return false;
}
log.warn("[job.delete.host] " + hostUuid + "/" + jobUUID + " >> " + node);
spawnMQ.sendControlMessage(new CommandTaskDelete(hostUuid, jobUUID, node, 0));
Job job = getJob(jobUUID);
if (isReplica && job != null) {
JobTask task = job.getTask(node);
task.setReplicas(removeReplicasForHost(hostUuid, task.getReplicas()));
task.setReadOnlyReplicas(removeReplicasForHost(hostUuid, task.getReadOnlyReplicas()));
queueJobTaskUpdateEvent(job);
}
return true;
} finally {
jobLock.unlock();
}
}
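/** Return a copy of the replica list with any replicas on the given host removed. */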
private List<JobTaskReplica> removeReplicasForHost(String hostUuid, List<JobTaskReplica> currentReplicas) {
if (currentReplicas == null || currentReplicas.size() == 0) {
return new ArrayList<>();
}
List<JobTaskReplica> replicasCopy = new ArrayList<>(currentReplicas);
Iterator<JobTaskReplica> iterator = replicasCopy.iterator();
while (iterator.hasNext()) {
JobTaskReplica replica = iterator.next();
if (replica.getHostUUID().equals(hostUuid)) {
iterator.remove();
}
}
return replicasCopy;
}
/**
* The entry point for requests to start every task from a job (for example, from the UI.)
*
* @param jobUUID Job ID
* @param isManualKick Whether the task came from the interface, which is given special treatment during quiesce
* @throws Exception
*/
public void startJob(String jobUUID, boolean isManualKick) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
require(job.isEnabled(), "job disabled");
require(scheduleJob(job, isManualKick), "unable to schedule job");
sendJobUpdateEvent(job);
}
public String expandJob(String jobUUID) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
return expandJob(job);
}
public String expandJob(Job job) throws TokenReplacerOverflowException {
return expandJob(job.getId(), job.getParameters(), getJobConfig(job.getId()));
}
public boolean moveTask(String jobID, int node, String sourceUUID, String targetUUID) {
if (sourceUUID == null || targetUUID == null || sourceUUID.equals(targetUUID)) {
log.warn("[task.move] fail: invalid input " + sourceUUID + "," + targetUUID);
return false;
}
TaskMover tm = new TaskMover(this, new JobKey(jobID, node), targetUUID, sourceUUID, false);
log.warn("[task.move] attempting move for " + jobID + " / " + node);
return tm.execute(false);
}
public String expandJob(String id, Collection<JobParameter> parameters, String rawConfig)
throws TokenReplacerOverflowException {
// macro recursive expansion
String pass0 = JobExpand.macroExpand(this, rawConfig);
// template in params that "may" contain other macros
String pass1 = JobExpand.macroTemplateParams(pass0, parameters);
// macro recursive expansion again
String pass2 = JobExpand.macroExpand(this, pass1);
// replace remaining params not caught in pass 1
String pass3 = JobExpand.macroTemplateParams(pass2, parameters);
// inject job metadata from spawn
return JobExpand.magicMacroExpand(this, pass3, id);
}
public void stopJob(String jobUUID) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
for (JobTask task : job.getCopyOfTasks()) {
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
}
stopTask(jobUUID, task.getTaskID());
}
job.setHadMoreData(false);
}
public void killJob(String jobUUID) throws Exception {
boolean success = false;
while (!success && !shuttingDown.get()) {
jobLock.lock();
try {
if (taskQueuesByPriority.tryLock()) {
success = true;
Job job = getJob(jobUUID);
require(job != null, "job not found");
for (JobTask task : job.getCopyOfTasks()) {
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
}
killTask(jobUUID, task.getTaskID());
}
job.setHadMoreData(false);
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
}
}
/**
* Unlike job lookups, this does not return a clone, because there is no updater.
* There is no clean symmetry here; it could use cleanup.
*/
public JobTask getTask(String jobUUID, int taskID) {
Job job = getJob(jobUUID);
if (job != null) {
return job.getTask(taskID);
}
return null;
}
public JobTask getTask(JobKey jobKey) {
if (jobKey == null || jobKey.getJobUuid() == null || jobKey.getNodeNumber() == null) {
return null;
}
return getTask(jobKey.getJobUuid(), jobKey.getNodeNumber());
}
/**
* The entry point for requests to start tasks (for example, from the UI.) Does some checking, and ultimately
* kicks the task or adds it to the task queue as appropriate
*
* @param jobUUID Job ID
* @param taskID Node #
* @param addToQueue Whether the task should be added to the queue (false if the task is already on the queue)
* @param isManualKick Whether the task came from the interface, which is given special treatment during quiesce
* @param toQueueHead Whether to add the task to the head of the queue rather than the end
* @throws Exception When the task is invalid or already active
*/
public void startTask(String jobUUID, int taskID, boolean addToQueue, boolean isManualKick, boolean toQueueHead) throws Exception {
Job job = getJob(jobUUID);
require(job != null, "job not found");
require(job.isEnabled(), "job is disabled");
require(job.getState() != JobState.DEGRADED, "job in degraded state");
require(taskID >= 0, "invalid task id");
JobTask task = getTask(jobUUID, taskID);
require(task != null, "no such task");
require(task.getState() != JobTaskState.BUSY && task.getState() != JobTaskState.ALLOCATED &&
task.getState() != JobTaskState.QUEUED, "invalid task state");
if (addToQueue) {
addToTaskQueue(task.getJobKey(), isManualKick && quiesce, toQueueHead);
} else {
kickIncludingQueue(job, task, expandJob(job), false, isManualKick && quiesce);
}
log.warn("[task.kick] started " + job.getId() + " / " + task.getTaskID() + " = " + job.getDescription());
queueJobTaskUpdateEvent(job);
}
public void stopTask(String jobUUID, int taskID) throws Exception {
stopTask(jobUUID, taskID, false, false);
}
private void stopTask(String jobUUID, int taskID, boolean force, boolean onlyIfQueued) throws Exception {
Job job = getJob(jobUUID);
JobTask task = getTask(jobUUID, taskID);
if (job != null && task != null) {
taskQueuesByPriority.setStoppedJob(true); // Terminate the current queue iteration cleanly
HostState host = getHostState(task.getHostUUID());
if (force) {
task.setRebalanceSource(null);
task.setRebalanceTarget(null);
}
if (task.getState() == JobTaskState.QUEUED) {
removeFromQueue(task);
log.warn("[taskQueuesByPriority] queued job " + jobUUID);
} else if (task.getState() == JobTaskState.REBALANCE) {
log.warn("[task.stop] " + task.getJobKey() + " rebalance stopped with force=" + force);
} else if (force && (task.getState() == JobTaskState.REVERT)) {
log.warn("[task.stop] " + task.getJobKey() + " killed in state " + task.getState());
int code = JobTaskErrorCode.EXIT_REVERT_FAILURE;
job.errorTask(task, code);
queueJobTaskUpdateEvent(job);
} else if (force && (host == null || host.isDead() || !host.isUp())) {
log.warn("[task.stop] " + task.getJobKey() + " killed on down host");
job.errorTask(task, 1);
queueJobTaskUpdateEvent(job);
// Host is unreachable; bail once the task is errored.
return;
} else if (host != null && !host.hasLive(task.getJobKey())) {
log.warn("[task.stop] node that minion doesn't think is running: " + task.getJobKey());
job.setTaskState(task, JobTaskState.IDLE);
queueJobTaskUpdateEvent(job);
}
else if (task.getState() == JobTaskState.ALLOCATED) {
log.warn("[task.stop] node in allocated state " + jobUUID + "/" + taskID + " host = " + (host != null ? host.getHost() : "unknown"));
}
// The following is called regardless of task state, unless the host is nonexistent/failed
if (host != null) {
spawnMQ.sendControlMessage(new CommandTaskStop(host.getHostUuid(), jobUUID, taskID, job.getRunCount(), force, onlyIfQueued));
} else {
log.warn("[task.stop]" + jobUUID + "/" + taskID + "]: no host monitored for uuid " + task.getHostUUID());
}
} else {
log.warn("[task.stop]" + jobUUID + "]: no nodes");
}
}
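/** Remove a queued task from the priority queue, mark it idle, and persist the updated queue. */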
protected boolean removeFromQueue(JobTask task) {
boolean removed = false;
Job job = getJob(task.getJobUUID());
if (job != null) {
log.warn("[taskQueuesByPriority] setting " + task.getJobKey() + " as idle and removing from queue");
job.setTaskState(task, JobTaskState.IDLE);
removed = taskQueuesByPriority.remove(job.getPriority(), task.getJobKey());
queueJobTaskUpdateEvent(job);
sendTaskQueueUpdateEvent();
}
writeSpawnQueue();
return removed;
}
public void killTask(String jobUUID, int taskID) throws Exception {
stopTask(jobUUID, taskID, true, false);
}
public void revertJobOrTask(String jobUUID, int taskID, String backupType, int rev, long time) throws Exception {
if (taskID == -1) {
// Revert entire job
Job job = getJob(jobUUID);
int numTasks = job.getTaskCount();
for (int i = 0; i < numTasks; i++) {
log.warn("[task.revert] " + jobUUID + "/" + i);
revert(jobUUID, backupType, rev, time, i);
}
} else {
// Revert single task
log.warn("[task.revert] " + jobUUID + "/" + taskID);
revert(jobUUID, backupType, rev, time, taskID);
}
}
private void revert(String jobUUID, String backupType, int rev, long time, int taskID) throws Exception {
JobTask task = getTask(jobUUID, taskID);
if (task != null) {
task.setPreFailErrorCode(0);
HostState host = getHostState(task.getHostUUID());
if (host == null) {
log.warn("[task.revert] no known host " + task.getHostUUID() + " for " + jobUUID + "/" + task.getTaskID() + "; cannot revert");
return;
}
if (task.getState() == JobTaskState.ALLOCATED || task.getState() == JobTaskState.QUEUED) {
log.warn("[task.revert] node in allocated state " + jobUUID + "/" + task.getTaskID() + " host = " + host.getHost());
}
log.warn("[task.revert] sending revert message to host: " + host.getHost() + "/" + host.getHostUuid());
spawnMQ.sendControlMessage(new CommandTaskRevert(host.getHostUuid(), jobUUID, task.getTaskID(), backupType, rev, time, getTaskReplicaTargets(task, task.getAllReplicas()), false));
} else {
log.warn("[task.revert] task " + jobUUID + "/" + taskID + "] not found");
}
}
public Collection<String> listCommands() {
synchronized (spawnState.commands) {
return spawnState.commands.keySet();
}
}
public JobCommand getCommand(String key) {
synchronized (spawnState.commands) {
return spawnState.commands.get(key);
}
}
public void putCommand(String key, JobCommand command, boolean store) throws Exception {
synchronized (spawnState.commands) {
spawnState.commands.put(key, command);
}
if (useZk && store) {
spawnDataStore.putAsChild(SPAWN_COMMON_COMMAND_PATH, key, new String(codec.encode(command)));
}
}
public boolean deleteCommand(String key) throws Exception {
/* prevent deletion of commands used in jobs */
for (Job job : listJobs()) {
if (job.getCommand() != null && job.getCommand().equals(key)) {
return false;
}
}
synchronized (spawnState.commands) {
JobCommand cmd = spawnState.commands.remove(key);
if (cmd != null) {
spawnDataStore.deleteChild(SPAWN_COMMON_COMMAND_PATH, key);
return true;
} else {
return false;
}
}
}
public Collection<String> listMacros() {
synchronized (spawnState.macros) {
return spawnState.macros.keySet();
}
}
public JobMacro getMacro(String key) {
synchronized (spawnState.macros) {
return spawnState.macros.get(key.trim());
}
}
public void putMacro(String key, JobMacro macro, boolean store) throws Exception {
key = key.trim();
synchronized (spawnState.macros) {
spawnState.macros.put(key, macro);
}
if (store) {
spawnDataStore.putAsChild(SPAWN_COMMON_MACRO_PATH, key, new String(codec.encode(macro)));
}
}
public boolean deleteMacro(String key) {
/* prevent deletion of macros used in job configs */
for (Job job : listJobs()) {
String rawconf = getJobConfig(job.getId());
if (rawconf != null && rawconf.contains("%{" + key + "}%")) {
return false;
}
}
synchronized (spawnState.macros) {
JobMacro macro = spawnState.macros.remove(key);
if (macro != null) {
spawnDataStore.deleteChild(SPAWN_COMMON_MACRO_PATH, key);
return true;
} else {
return false;
}
}
}
// --------------------- END API ----------------------
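/** Resolve a host id list to HostState objects, or, when no hosts are specified, use all hosts of the minion type sorted by active task count. */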
private List<HostState> getOrCreateHostStateList(String minionType, Collection<String> hostList) {
List<HostState> hostStateList;
if (hostList == null || hostList.size() == 0) {
hostStateList = balancer.sortHostsByActiveTasks(listHostStatus(minionType));
} else {
hostStateList = new ArrayList<>();
for (String hostId : hostList) {
hostStateList.add(getHostState(hostId));
}
}
return hostStateList;
}
/**
* mq message dispatch
*/
protected void handleMessage(CoreMessage core) {
Job job;
JobTask task;
if (deadMinionMembers.getMemberSet().contains(core.getHostUuid())) {
log.warn("[mq.core] ignoring message from host: " + core.getHostUuid() + " because it is dead");
return;
}
switch (core.getMessageType()) {
default:
log.warn("[mq.core] unhandled type = " + core.getMessageType());
break;
case CMD_TASK_NEW:
// ignore these replication-related messages sent by minions
break;
case STATUS_HOST_INFO:
Set<String> upMinions = minionMembers.getMemberSet();
HostState state = (HostState) core;
HostState oldState = getHostState(state.getHostUuid());
if (oldState == null) {
log.warn("[host.status] from unmonitored " + state.getHostUuid() + " = " + state.getHost() + ":" + state.getPort());
}
boolean hostEnabled = true;
synchronized (disabledHosts) {
if (disabledHosts.contains(state.getHost()) || disabledHosts.contains(state.getHostUuid())) {
hostEnabled = false;
state.setDisabled(true);
} else {
state.setDisabled(false);
}
}
// Propagate minion state for ui
if (upMinions.contains(state.getHostUuid()) && hostEnabled) {
state.setUp(true);
}
state.setUpdated();
sendHostUpdateEvent(state);
updateHostState(state);
break;
case STATUS_TASK_BEGIN:
StatusTaskBegin begin = (StatusTaskBegin) core;
tasksStartedPerHour.mark();
if (debug("-begin-")) {
log.info("[task.begin] :: " + begin.getJobKey());
}
try {
job = getJob(begin.getJobUuid());
if (job == null) {
log.warn("[task.begin] on dead job " + begin.getJobKey() + " from " + begin.getHostUuid());
break;
}
if (job.getStartTime() == null) {
job.setStartTime(System.currentTimeMillis());
}
JobTask node = null;
for (JobTask jobNode : job.getCopyOfTasks()) {
if (jobNode.getTaskID() == begin.getNodeID()) {
node = jobNode;
break;
}
}
if (node != null) {
job.setTaskState(node, JobTaskState.BUSY);
node.incrementStarts();
queueJobTaskUpdateEvent(job);
} else {
log.warn("[task.begin] done report for missing node " + begin.getJobKey());
}
} catch (Exception ex) {
log.warn("", ex);
}
break;
case STATUS_TASK_CANT_BEGIN:
StatusTaskCantBegin cantBegin = (StatusTaskCantBegin) core;
log.info("[task.cantbegin] received cantbegin from " + cantBegin.getHostUuid() + " for task " + cantBegin.getJobUuid() + "," + cantBegin.getNodeID());
job = getJob(cantBegin.getJobUuid());
task = getTask(cantBegin.getJobUuid(), cantBegin.getNodeID());
if (job != null && task != null) {
try {
job.setTaskState(task, JobTaskState.IDLE);
log.info("[task.cantbegin] kicking " + task.getJobKey());
startTask(cantBegin.getJobUuid(), cantBegin.getNodeID(), true, true, true);
} catch (Exception ex) {
log.warn("[task.schedule] failed to reschedule task for " + task.getJobKey(), ex);
}
} else {
log.warn("[task.cantbegin] received cantbegin from " + cantBegin.getHostUuid() + " for nonexistent job " + cantBegin.getJobUuid());
}
break;
case STATUS_TASK_PORT:
StatusTaskPort port = (StatusTaskPort) core;
job = getJob(port.getJobUuid());
task = getTask(port.getJobUuid(), port.getNodeID());
if (task != null) {
log.info("[task.port] " + job.getId() + "/" + task.getTaskID() + " @ " + port.getPort());
task.setPort(port.getPort());
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_BACKUP:
StatusTaskBackup backup = (StatusTaskBackup) core;
job = getJob(backup.getJobUuid());
task = getTask(backup.getJobUuid(), backup.getNodeID());
if (task != null && task.getState() != JobTaskState.REBALANCE && task.getState() != JobTaskState.MIGRATING) {
log.info("[task.backup] " + job.getId() + "/" + task.getTaskID());
job.setTaskState(task, JobTaskState.BACKUP);
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REPLICATE:
StatusTaskReplicate replicate = (StatusTaskReplicate) core;
job = getJob(replicate.getJobUuid());
task = getTask(replicate.getJobUuid(), replicate.getNodeID());
if (task != null) {
log.info("[task.replicate] " + job.getId() + "/" + task.getTaskID());
JobTaskState taskState = task.getState();
if (taskState != JobTaskState.REBALANCE && taskState != JobTaskState.MIGRATING) {
job.setTaskState(task, replicate.isFullReplication() ? JobTaskState.FULL_REPLICATE : JobTaskState.REPLICATE, true);
}
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REVERT:
StatusTaskRevert revert = (StatusTaskRevert) core;
job = getJob(revert.getJobUuid());
task = getTask(revert.getJobUuid(), revert.getNodeID());
if (task != null) {
log.info("[task.revert] " + job.getId() + "/" + task.getTaskID());
job.setTaskState(task, JobTaskState.REVERT, true);
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_REPLICA:
StatusTaskReplica replica = (StatusTaskReplica) core;
job = getJob(replica.getJobUuid());
task = getTask(replica.getJobUuid(), replica.getNodeID());
if (task != null) {
for (JobTaskReplica taskReplica : task.getReplicas()) {
if (taskReplica.getHostUUID().equals(replica.getHostUuid())) {
taskReplica.setVersion(replica.getVersion());
taskReplica.setLastUpdate(replica.getUpdateTime());
}
}
log.info("[task.replica] version updated for " + job.getId() + "/" + task.getTaskID() + " ver " + task.getRunCount() + "/" + replica.getVersion());
queueJobTaskUpdateEvent(job);
}
break;
case STATUS_TASK_END:
StatusTaskEnd update = (StatusTaskEnd) core;
log.info("[task.end] :: " + update.getJobUuid() + "/" + update.getNodeID() + " exit=" + update.getExitCode());
tasksCompletedPerHour.mark();
taskQueuesByPriority.markHostAvailable(update.getHostUuid());
try {
job = getJob(update.getJobUuid());
if (job == null) {
log.warn("[task.end] on dead job " + update.getJobKey() + " from " + update.getHostUuid());
break;
}
task = getTask(update.getJobUuid(), update.getNodeID());
if (task == null) {
log.warn("[task.end] on missing task " + update.getJobKey() + " from " + update.getHostUuid());
break;
}
if (task.getHostUUID() != null && !task.getHostUUID().equals(update.getHostUuid())) {
log.warn("[task.end] received from incorrect host " + update.getHostUuid());
break;
}
handleStatusTaskEnd(job, task, update);
} catch (Exception ex) {
log.warn("Failed to handle end message: " + ex, ex);
}
break;
}
}
/**
* Handle the various actions in response to a StatusTaskEnd sent by a minion
*
* @param job The job to modify
* @param task The task to modify
* @param update The message
*/
private void handleStatusTaskEnd(Job job, JobTask task, StatusTaskEnd update) {
TaskExitState exitState = update.getExitState();
boolean more = exitState != null && exitState.hadMoreData();
boolean wasStopped = exitState != null && exitState.getWasStopped();
task.setFileCount(update.getFileCount());
task.setByteCount(update.getByteCount());
boolean errored = update.getExitCode() != 0 && update.getExitCode() != JobTaskErrorCode.REBALANCE_PAUSE;
if (update.getRebalanceSource() != null) {
handleRebalanceFinish(job, task, update);
} else {
if (exitState != null) {
task.setInput(exitState.getInput());
task.setMeanRate(exitState.getMeanRate());
task.setTotalEmitted(exitState.getTotalEmitted());
}
if (more) {
job.setHadMoreData(more);
}
task.setWasStopped(wasStopped);
}
if (errored) {
handleTaskError(job, task, update.getExitCode());
} else {
job.setTaskFinished(task);
}
if (job.isFinished() && update.getRebalanceSource() == null) {
finishJob(job, errored);
}
queueJobTaskUpdateEvent(job);
}
public void handleTaskError(Job job, JobTask task, int exitCode) {
log.warn("[task.end] " + task.getJobKey() + " exited abnormally with " + exitCode);
task.incrementErrors();
try {
spawnJobFixer.fixTask(job, task, exitCode);
} catch (Exception ex) {
job.errorTask(task, exitCode);
}
}
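/**
 * Finish a rebalance/move: on success, swap the task (or its replica) onto the target host and delete the
 * old copy; if the hosts can no longer swap or the rsync failed, run a directory fix; a paused rebalance
 * needs no special action.
 */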
public void handleRebalanceFinish(Job job, JobTask task, StatusTaskEnd update) {
String rebalanceSource = update.getRebalanceSource();
String rebalanceTarget = update.getRebalanceTarget();
if (update.getExitCode() == 0) {
// Rsync succeeded. Swap to the new host, assuming it is still healthy.
task.setRebalanceSource(null);
task.setRebalanceTarget(null);
if (checkHostStatesForSwap(task.getJobKey(), rebalanceSource, rebalanceTarget, false)) {
if (task.getHostUUID().equals(rebalanceSource)) {
task.setHostUUID(rebalanceTarget);
} else {
task.replaceReplica(rebalanceSource, rebalanceTarget);
}
deleteTask(job.getId(), rebalanceSource, task.getTaskID(), false);
} else {
// The hosts returned by end message were not found, or weren't in a usable state.
fixTaskDir(job.getId(), task.getTaskID(), true, true);
}
} else if (update.getExitCode() == JobTaskErrorCode.REBALANCE_PAUSE) {
// Rebalancing was paused. No special action necessary.
log.warn("[task.move] task rebalance for " + task.getJobKey() + " paused until next run");
} else {
// The rsync failed. Clean up the extra task directory.
fixTaskDir(job.getId(), task.getTaskID(), true, true);
}
}
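/** Run a job's onComplete/onError action: either POST the encoded job to an http:// URL, or kick the jobs/aliases named by a kick:// URL. */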
private void doOnState(Job job, String url, String state) {
if (Strings.isEmpty(url)) {
return;
}
if (url.startsWith("http://")) {
try {
quietBackgroundPost(state + " " + job.getId(), url, codec.encode(job));
} catch (Exception e) {
log.warn("", e);
}
} else if (url.startsWith("kick://")) {
Map<String, List<String>> aliasMap = getAliases();
for (String kick : Strings.splitArray(url.substring(7), ",")) {
kick = kick.trim();
List<String> aliases = aliasMap.get(kick);
if (aliases != null) {
for (String alias : aliases) {
safeStartJob(alias.trim());
}
} else {
safeStartJob(kick);
}
}
} else {
log.warn("invalid onState url: " + url + " for " + job.getId());
}
}
private void safeStartJob(String uuid) {
try {
startJob(uuid, false);
} catch (Exception ex) {
log.warn("[safe.start] " + uuid + " failed due to " + ex);
}
}
/**
* Perform cleanup tasks once per job completion. Triggered when the last running task transitions to an idle state.
* In particular: perform any onComplete/onError triggers, set the end time, and possibly do a fixdirs.
* @param job The job that just finished
* @param errored Whether the job ended up in error state
*/
private void finishJob(Job job, boolean errored) {
log.info("[job.done] " + job.getId() + " :: errored=" + errored + ". callback=" + job.getOnCompleteURL());
jobsCompletedPerHour.mark();
job.setFinishTime(System.currentTimeMillis());
spawnFormattedLogger.finishJob(job);
if (!quiesce) {
if (!errored) {
/* rekick if any task had more work to do */
if (job.hadMoreData()) {
log.warn("[job.done] " + job.getId() + " :: rekicking on more data");
try {
scheduleJob(job, false);
} catch (Exception ex) {
log.warn("", ex);
}
} else {
doOnState(job, job.getOnCompleteURL(), "onComplete");
if (ENABLE_JOB_FIXDIRS_ONCOMPLETE && job.getRunCount() > 1) {
// Perform a fixDirs on completion, cleaning up missing replicas/orphans.
fixTaskDir(job.getId(), -1, false, true);
}
}
} else {
doOnState(job, job.getOnErrorURL(), "onError");
}
}
balancer.requestJobSizeUpdate(job.getId(), 0);
}
private void quietBackgroundPost(String threadName, final String url, final byte[] post) {
new Thread(threadName) {
public void run() {
try {
HttpUtil.httpPost(url, "javascript/text", post, 60000);
} catch (Exception ex) {
log.warn("", ex);
}
}
}.start();
}
/**
* simpler wrapper for Runtime.exec() with logging
*/
private int exec(String cmd[]) throws InterruptedException, IOException {
if (debug("-exec-")) {
log.info("[exec.cmd] " + Strings.join(cmd, " "));
}
Process proc = Runtime.getRuntime().exec(cmd);
InputStream in = proc.getInputStream();
String buf = Bytes.toString(Bytes.readFully(in));
if (debug("-exec-") && buf.length() > 0) {
String lines[] = Strings.splitArray(buf, "\n");
for (String line : lines) {
log.info("[exec.out] " + line);
}
}
in = proc.getErrorStream();
buf = Bytes.toString(Bytes.readFully(in));
if (debug("-exec-") && buf.length() > 0) {
String lines[] = Strings.splitArray(buf, "\n");
for (String line : lines) {
log.info("[exec.err] " + line);
}
}
int exit = proc.waitFor();
if (debug("-exec-")) {
log.info("[exec.exit] " + exit);
}
return exit;
}
/**
* debug output, can be disabled by policy
*/
private boolean debug(String match) {
return debug != null && (debug.contains(match) || debug.contains("-all-"));
}
@VisibleForTesting
JobMacro createJobHostMacro(String job, int port) {
String sPort = Integer.valueOf(port).toString();
Set<String> jobHosts = new TreeSet<>();// best set?
jobLock.lock();
try {
Collection<HostState> hosts = listHostStatus(null);
Map<String, String> uuid2Host = new HashMap<>();
for (HostState host : hosts) {
if (host.isUp()) {
uuid2Host.put(host.getHostUuid(), host.getHost());
}
}
if (uuid2Host.size() == 0) {
log.warn("[createJobHostMacro] warning job was found on no available hosts: " + job);
}
IJob ijob = getJob(job);
if (ijob == null) {
log.warn("[createJobHostMacro] Unable to get job config for job: " + job);
throw new RuntimeException("[createJobHostMacro] Unable to get job config for job: " + job);
}
for (JobTask task : ijob.getCopyOfTasks()) {
String host = uuid2Host.get(task.getHostUUID());
if (host != null) {
jobHosts.add(host);
}
}
} finally {
jobLock.unlock();
}
List<String> hostStrings = new ArrayList<>();
for (String host : jobHosts) {
hostStrings.add("{host:\"" + host + "\", port:" + sPort + "}");
}
return new JobMacro("spawn", "createJobHostMacro-" + job, Joiner.on(',').join(hostStrings));
}
// TODO: 1. Why is this not in SpawnMQ? 2. Who actually listens to job config changes
// TODO: answer: this is for the web ui and live updating via SpawnManager /listen.batch
/**
* send job update event to registered listeners (usually http clients)
*/
private void sendJobUpdateEvent(Job job) {
jobLock.lock();
try {
if (jobConfigManager != null) {
jobConfigManager.updateJob(job);
}
} finally {
jobLock.unlock();
}
sendJobUpdateEvent("job.update", job);
}
public void queueJobTaskUpdateEvent(Job job) {
jobLock.lock();
try {
jobUpdateQueue.add(job.getId());
} finally {
jobLock.unlock();
}
}
public void drainJobTaskUpdateQueue() {
long start = System.currentTimeMillis();
Set<String> jobIds = new HashSet<String>();
jobUpdateQueue.drainTo(jobIds);
if (jobIds.size() > 0) {
if (log.isTraceEnabled()) {
log.trace("[drain] Draining " + jobIds.size() + " jobs from the update queue");
}
for (String jobId : jobIds) {
Job job = getJob(jobId);
sendJobUpdateEvent(job);
}
if (log.isTraceEnabled()) {
log.trace("[drain] Finished Draining " + jobIds.size() + " jobs from the update queue in " + (System.currentTimeMillis() - start) + "ms");
}
}
}
public void sendJobUpdateEvent(String label, Job job) {
try {
sendEventToClientListeners(label, getJobUpdateEvent(job));
} catch (Exception e) {
log.warn("", e);
}
}
/**
* This method adds a cluster.quiesce event to be sent to clientListeners to notify those using the UI that the cluster
* has been quiesced.
*
* @param username The user who quiesced or unquiesced the cluster
*/
public void sendClusterQuiesceEvent(String username) {
try {
boolean quiesce = getSettings().getQuiesced();
JSONObject info = new JSONObject();
info.put("username", username);
info.put("date", JitterClock.globalTime());
info.put("quiesced", quiesce);
log.info("User " + username + " has " + (quiesce ? "quiesced" : "unquiesed") + " the cluster.");
sendEventToClientListeners("cluster.quiesce", info);
} catch (Exception e) {
log.warn("", e);
}
}
/**
* Adds the task.queue.size event to be sent to clientListeners on next batch.listen update
*/
public void sendTaskQueueUpdateEvent() {
try {
int numQueued = 0;
int numQueuedWaitingOnError = 0;
LinkedList<JobKey>[] queues = null;
taskQueuesByPriority.lock();
try {
//noinspection unchecked
queues = taskQueuesByPriority.values().toArray(new LinkedList[taskQueuesByPriority.size()]);
for (LinkedList<JobKey> queue : queues) {
numQueued += queue.size();
for (JobKey key : queue) {
Job job = getJob(key);
if (job != null && !job.isEnabled()) {
numQueuedWaitingOnError += 1;
}
}
}
lastQueueSize = numQueued;
} finally {
taskQueuesByPriority.unlock();
}
JSONObject json = new JSONObject("{'size':" + Integer.toString(numQueued) + ",'sizeErr':" + Integer.toString(numQueuedWaitingOnError) + "}");
sendEventToClientListeners("task.queue.size", json);
} catch (Exception e) {
log.warn("[task.queue.update] received exception while sending task queue update event (this is ok unless it happens repeatedly) " + e);
e.printStackTrace();
}
}
public int getLastQueueSize() {
return lastQueueSize;
}
public JSONObject getJobUpdateEvent(IJob job) throws Exception {
long files = 0;
long bytes = 0;
int running = 0;
int errored = 0;
int done = 0;
if (job == null) {
String errMessage = "getJobUpdateEvent called with null job";
log.warn(errMessage);
throw new Exception(errMessage);
}
List<JobTask> jobNodes = job.getCopyOfTasks();
int numNodes = 0;
if (jobNodes != null) {
numNodes = jobNodes.size();
for (JobTask task : jobNodes) {
files += task.getFileCount();
bytes += task.getByteCount();
if (task.getState() != JobTaskState.ALLOCATED && task.getState() != JobTaskState.QUEUED) {
running++;
}
switch (task.getState()) {
case IDLE:
done++;
break;
case ERROR:
done++;
errored++;
break;
}
}
}
JSONObject ojob = job.toJSON().put("config", "").put("parameters", "");
ojob.put("nodes", numNodes);
ojob.put("running", running);
ojob.put("errored", errored);
ojob.put("done", done);
ojob.put("files", files);
ojob.put("bytes", bytes);
return ojob;
}
protected void sendHostUpdateEvent(HostState state) {
sendHostUpdateEvent("host.update", state);
}
private void sendHostUpdateEvent(String label, HostState state) {
try {
sendEventToClientListeners(label, getHostStateUpdateEvent(state));
} catch (Exception e) {
log.warn("", e);
}
}
public JSONObject getHostStateUpdateEvent(HostState state) throws Exception {
if (state == null) {
return null;
}
JSONObject ohost = CodecJSON.encodeJSON(state);
ohost.put("spawnState", getSpawnStateString(state));
ohost.put("stopped", ohost.getJSONArray("stopped").length());
ohost.put("total", state.countTotalLive());
double score = 0;
try {
score = balancer.getHostScoreCached(state.getHostUuid());
} catch (NullPointerException npe) {
log.warn("[host.status] exception in getHostStateUpdateEvent", npe);
}
ohost.put("score", score);
return ohost;
}
private String getSpawnStateString(HostState state) {
if (state.isDead()) {
return "failed";
} else if (state.isDisabled()) {
return "disabled";
}
return hostFailWorker.getFailureStateString(state.getHostUuid(), state.isUp());
}
/**
* send codable message to registered listeners as json
*/
private void sendEventToClientListeners(final String topic, final JSONObject message) {
long time = System.currentTimeMillis();
for (Entry<String, ClientEventListener> ev : listeners.entrySet()) {
ClientEventListener client = ev.getValue();
// drop listeners we haven't heard from in a while
if (time - client.lastSeen > 60000) {
ClientEventListener listener = listeners.remove(ev.getKey());
if (debug("-listen-")) {
log.warn("[listen] dropping listener queue for " + ev.getKey() + " = " + listener);
}
continue;
}
try {
client.events.put(new ClientEvent(topic, message));
} catch (Exception ex) {
log.warn("", ex);
}
}
webSocketManager.addEvent(new ClientEvent(topic, message));
}
private class UpdateEventRunnable implements Runnable {
private final Map<String, Long> events = new HashMap<>();
@Override
public void run() {
HostCapacity hostmax = new HostCapacity();
HostCapacity hostused = new HostCapacity();
synchronized (monitored) {
for (HostState hs : monitored.values()) {
hostmax.add(hs.getMax());
hostused.add(hs.getUsed());
}
}
int jobshung = 0;
int jobrunning = 0;
int jobscheduled = 0;
int joberrored = 0;
int taskallocated = 0;
int taskbusy = 0;
int taskerrored = 0;
int taskqueued = 0;
long files = 0;
long bytes = 0;
jobLock.lock();
try {
for (Job job : spawnState.jobs.values()) {
for (JobTask jn : job.getCopyOfTasks()) {
switch (jn.getState()) {
case ALLOCATED:
taskallocated++;
break;
case BUSY:
taskbusy++;
break;
case ERROR:
taskerrored++;
break;
case IDLE:
break;
case QUEUED:
taskqueued++;
break;
}
files += jn.getFileCount();
bytes += jn.getByteCount();
}
switch (job.getState()) {
case IDLE:
break;
case RUNNING:
jobrunning++;
if (job.getStartTime() != null && job.getMaxRunTime() != null &&
(JitterClock.globalTime() - job.getStartTime() > job.getMaxRunTime() * 2)) {
jobshung++;
}
break;
case SCHEDULED:
jobscheduled++;
break;
}
if (job.getState() == JobState.ERROR) {
joberrored++;
}
}
} finally {
jobLock.unlock();
}
events.clear();
events.put("time", System.currentTimeMillis());
events.put("hosts", (long) monitored.size());
events.put("commands", (long) spawnState.commands.size());
events.put("macros", (long) spawnState.macros.size());
events.put("jobs", (long) spawnState.jobs.size());
events.put("cpus", (long) hostmax.getCpu());
events.put("cpus_used", (long) hostused.getCpu());
events.put("mem", (long) hostmax.getMem());
events.put("mem_used", (long) hostused.getMem());
events.put("io", (long) hostmax.getIo());
events.put("io_used", (long) hostused.getIo());
events.put("jobs_running", (long) jobrunning);
events.put("jobs_scheduled", (long) jobscheduled);
events.put("jobs_errored", (long) joberrored);
events.put("jobs_hung", (long) jobshung);
events.put("tasks_busy", (long) taskbusy);
events.put("tasks_allocated", (long) taskallocated);
events.put("tasks_queued", (long) taskqueued);
events.put("tasks_errored", (long) taskerrored);
events.put("files", files);
events.put("bytes", bytes);
spawnFormattedLogger.periodicState(events);
runningTaskCount.set(taskbusy);
queuedTaskCount.set(taskqueued);
failTaskCount.set(taskerrored);
runningJobCount.set(jobrunning);
queuedJobCount.set(jobscheduled);
failJobCount.set(joberrored);
hungJobCount.set(jobshung);
}
}
private void require(boolean test, String msg) throws Exception {
if (!test) {
throw new Exception("test failed with '" + msg + "'");
}
}
/**
* called by Thread registered to Runtime triggered by sig-kill
*/
void runtimeShutdownHook() {
shuttingDown.set(true);
try {
drainJobTaskUpdateQueue();
hostFailWorker.stop();
} catch (Exception ex) {
log.warn("", ex);
}
try {
if (spawnFormattedLogger != null) {
spawnFormattedLogger.close();
}
} catch (Exception ex) {
log.warn("", ex);
}
try {
closeZkClients();
} catch (Exception ex) {
log.warn("", ex);
}
}
/**
* re-kicks jobs which are on a repeating schedule
*/
private class JobRekickTask implements Runnable {
public void run() {
boolean kicked;
do {
kicked = false;
/*
* cycle through jobs and look for those that need nodes
* allocated. lock to prevent other RPCs from conflicting with scheduling.
*/
try {
if (!quiesce) {
String jobids[] = null;
jobLock.lock();
try {
jobids = new String[spawnState.jobs.size()];
jobids = spawnState.jobs.keySet().toArray(jobids);
} finally {
jobLock.unlock();
}
long clock = System.currentTimeMillis();
for (String jobid : jobids) {
Job job = getJob(jobid);
if (job == null) {
log.warn("ERROR: missing job for id " + jobid);
continue;
}
if (job.getState() == JobState.IDLE && job.getStartTime() == null && job.getEndTime() == null) {
job.setEndTime(clock);
}
// check for recurring jobs (that aren't already running)
if (job.shouldAutoRekick(clock)) {
try {
if (scheduleJob(job, false)) {
log.info("[schedule] rekicked " + job.getId());
kicked = true;
}
} catch (Exception ex) {
log.warn("[schedule] ex while rekicking, disabling " + job.getId());
job.setEnabled(false);
updateJob(job);
throw new Exception(ex);
}
}
}
}
} catch (Exception ex) {
log.warn("auto rekick failed: ", ex);
}
} while (kicked);
}
}
protected void autobalance() {
autobalance(SpawnBalancer.RebalanceType.HOST, SpawnBalancer.RebalanceWeight.HEAVY);
}
protected void autobalance(SpawnBalancer.RebalanceType type, SpawnBalancer.RebalanceWeight weight) {
executeReallocationAssignments(balancer.getAssignmentsForAutoBalance(type, weight), false);
}
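/** Resolve and attach the job's submit command; returns false if the command is invalid or the job is disabled. */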
private boolean schedulePrep(Job job) {
job.setSubmitCommand(getCommand(job.getCommand()));
if (job.getSubmitCommand() == null) {
log.warn("[schedule] failed submit : invalid command " + job.getCommand());
return false;
}
return job.isEnabled();
}
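/** Convert a replica list into ReplicaTarget entries, failing fast if any replica host is no longer known to Spawn. */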
private ReplicaTarget[] getTaskReplicaTargets(JobTask task, List<JobTaskReplica> replicaList) {
ReplicaTarget replicas[] = null;
if (replicaList != null) {
int next = 0;
replicas = new ReplicaTarget[replicaList.size()];
for (JobTaskReplica replica : replicaList) {
HostState host = getHostState(replica.getHostUUID());
if (host == null) {
log.warn("[getTaskReplicaTargets] error - replica host: " + replica.getHostUUID() + " does not exist!");
throw new RuntimeException("[getTaskReplicaTargets] error - replica host: " + replica.getHostUUID() + " does not exist. Rebalance the job to correct issue");
}
replicas[next++] = new ReplicaTarget(host.getHostUuid(), host.getHost(), host.getUser(), host.getPath(), task.getReplicationFactor());
}
}
return replicas;
}
/**
* Attempt to kick a task. Add it to the queue instead if appropriate.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for the job
* @param inQueue Whether the task is already in the queue (in which case we shouldn't add it again)
* @param ignoreQuiesce Whether the task can kick regardless of Spawn's quiesce state
* @throws Exception If there is a problem scheduling the task
*/
private void kickIncludingQueue(Job job, JobTask task, String config, boolean inQueue, boolean ignoreQuiesce) throws Exception {
boolean success = false;
while (!success && !shuttingDown.get()) {
jobLock.lock();
try {
if (taskQueuesByPriority.tryLock()) {
success = true;
boolean kicked = kickOnExistingHosts(job, task, config, 0L, true, ignoreQuiesce);
if (!kicked && !inQueue) {
addToTaskQueue(task.getJobKey(), ignoreQuiesce, false);
}
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
}
}
/**
* Schedule every task from a job.
*
* @param job Job to kick
* @param isManualKick If the kick is coming from the UI, which is specially allowed to run during quiesce
* @return True if the job is scheduled successfully
* @throws Exception If there is a problem scheduling a task
*/
private boolean scheduleJob(Job job, boolean isManualKick) throws Exception {
if (!schedulePrep(job)) {
return false;
}
job.setSubmitTime(JitterClock.globalTime());
job.setStartTime(null);
job.setEndTime(null);
job.setHadMoreData(false);
job.incrementRunCount();
log.info("[job.schedule] assigning " + job.getId() + " with " + job.getCopyOfTasks().size() + " tasks");
jobsStartedPerHour.mark();
for (JobTask task : job.getCopyOfTasks()) {
if (task == null || task.getState() != JobTaskState.IDLE) {
continue;
}
addToTaskQueue(task.getJobKey(), isManualKick && quiesce, false);
}
updateJob(job);
return true;
}
/* helper for SpawnMesh */
CommandTaskKick getCommandTaskKick(Job job, JobTask task) {
JobCommand jobCmd = job.getSubmitCommand();
final String expandedJob;
try {
expandedJob = expandJob(job);
} catch (TokenReplacerOverflowException e) {
return null;
}
CommandTaskKick kick = new CommandTaskKick(
task.getHostUUID(),
task.getJobKey(),
job.getPriority(),
job.getCopyOfTasks().size(),
job.getMaxRunTime() != null ? job.getMaxRunTime() * 60000 : 0,
job.getRunCount(),
expandedJob,
Strings.join(jobCmd.getCommand(), " "),
Strings.isEmpty(job.getKillSignal()) ? null : job.getKillSignal(),
job.getHourlyBackups(),
job.getDailyBackups(),
job.getWeeklyBackups(),
job.getMonthlyBackups(),
getTaskReplicaTargets(task, task.getAllReplicas())
);
kick.setRetries(job.getRetries());
return kick;
}
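/**
 * Runnable that expands a job's config (if not already expanded) and sends the kick message off the main
 * queue-iteration thread; errors the task if the kick cannot be sent.
 */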
public class ScheduledTaskKick implements Runnable {
public String jobId;
public Collection<JobParameter> jobParameters;
public String jobConfig;
public String rawJobConfig;
public SpawnMQ spawnMQ;
public CommandTaskKick kick;
public Job job;
public JobTask task;
public ScheduledTaskKick(String jobId, Collection<JobParameter> jobParameters, String jobConfig, String rawJobConfig, SpawnMQ spawnMQ, CommandTaskKick kick, Job job, JobTask task) {
this.jobId = jobId;
this.jobParameters = jobParameters;
this.jobConfig = jobConfig;
this.rawJobConfig = rawJobConfig;
this.spawnMQ = spawnMQ;
this.kick = kick;
this.job = job;
this.task = task;
}
public void run() {
try {
if (jobConfig == null) {
jobConfig = expandJob(jobId, jobParameters, rawJobConfig);
}
kick.setConfig(jobConfig);
spawnMQ.sendJobMessage(kick);
if (debug("-task-")) {
log.info("[task.schedule] assigned " + jobId + "[" + kick.getNodeID() + "/" + (kick.getJobNodes() - 1) + "] to " + kick.getHostUuid());
}
} catch (Exception e) {
log.warn("failed to kick job " + jobId + " task " + kick.getNodeID() + " on host " + kick.getHostUuid() + ":\n" + e);
jobLock.lock();
try {
job.errorTask(task, JobTaskErrorCode.KICK_ERROR);
} finally {
jobLock.unlock();
}
}
}
}
/**
* Send a start message to a minion.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for job
* @return True if the start message is sent successfully
* @throws Exception If there is a problem scheduling a task
*/
public boolean scheduleTask(Job job, JobTask task, String config) throws Exception {
if (!schedulePrep(job)) {
return false;
}
if (task.getState() != JobTaskState.IDLE && task.getState() != JobTaskState.ERROR && task.getState() != JobTaskState.QUEUED) {
return false;
}
JobState oldState = job.getState();
if (!job.setTaskState(task, JobTaskState.ALLOCATED)) {
return false;
}
if (oldState == JobState.IDLE && job.getRunCount() <= task.getRunCount()) {
job.incrementRunCount();
job.setEndTime(null);
}
task.setRunCount(job.getRunCount());
task.setErrorCode(0);
task.setPreFailErrorCode(0);
JobCommand jobcmd = job.getSubmitCommand();
if (task.getRebalanceSource() != null && task.getRebalanceTarget() != null) {
// If a rebalance was stopped cleanly, resume it.
if (new TaskMover(this, task.getJobKey(), task.getRebalanceTarget(), task.getRebalanceSource(), false).execute(true)) {
return true;
}
}
CommandTaskKick kick = new CommandTaskKick(
task.getHostUUID(),
new JobKey(job.getId(), task.getTaskID()),
job.getPriority(),
job.getCopyOfTasks().size(),
job.getMaxRunTime() != null ? job.getMaxRunTime() * 60000 : 0,
job.getRunCount(),
null,
Strings.join(jobcmd.getCommand(), " "),
Strings.isEmpty(job.getKillSignal()) ? null : job.getKillSignal(),
job.getHourlyBackups(),
job.getDailyBackups(),
job.getWeeklyBackups(),
job.getMonthlyBackups(),
getTaskReplicaTargets(task, task.getAllReplicas())
);
kick.setRetries(job.getRetries());
        // Create a runnable to expand the job and send the kick message outside of the main queue-iteration thread.
        // Reason: the jobLock is held for the duration of the queue iteration, and expanding some (kafka) jobs can be very
        // slow. Making job expansion non-blocking prevents other (UI) threads from waiting on zookeeper.
        // Note: we copy the job id and parameters so that modifications from outside the queue-iteration thread are ignored
ArrayList<JobParameter> jobParameters = new ArrayList<>(); // deep clone of JobParameter list
for (JobParameter parameter : job.getParameters()) {
jobParameters.add(new JobParameter(parameter.getName(), parameter.getValue(), parameter.getDefaultValue()));
}
ScheduledTaskKick scheduledKick = new ScheduledTaskKick(job.getId(), jobParameters, config, getJobConfig(job.getId()), spawnMQ, kick, job, task);
expandKickExecutor.submit(scheduledKick);
return true;
}
/**
* Helper function for kickOnExistingHosts.
*
* @param task A task, typically one that is about to be kicked
* @return a List of HostStates from the task, either live or replica,
* that are unable to support a task kick (down, read-only, or scheduled to be failed)
*/
private List<HostState> hostsBlockingTaskKick(JobTask task) {
List<HostState> unavailable = new ArrayList<>();
HostState liveHost = getHostState(task.getHostUUID());
if (shouldBlockTaskKick(liveHost)) {
unavailable.add(liveHost);
}
List<JobTaskReplica> replicas = (task.getReplicas() != null ? task.getReplicas() : new ArrayList<JobTaskReplica>());
for (JobTaskReplica replica : replicas) {
HostState replicaHost = getHostState(replica.getHostUUID());
if (shouldBlockTaskKick(replicaHost)) {
unavailable.add(replicaHost);
}
}
return unavailable;
}
private boolean shouldBlockTaskKick(HostState host) {
return host == null || !host.canMirrorTasks() || host.isReadOnly() ||
hostFailWorker.getFailureState(host.getHostUuid()) == HostFailWorker.FailState.FAILING_FS_DEAD;
}
/**
* Attempt to find a host that has the capacity to run a task. Try the live host first, then any replica hosts,
* swapping onto them only if one is available and if allowed to do so.
*
* @param job Job to kick
* @param task Task to kick
* @param config Config for job
* @param timeOnQueue Time that the task has been on the queue
* @param allowSwap Whether to allow swapping to replica hosts
* @param ignoreQuiesce Whether any kicks that occur can ignore Spawn's quiesce state
* @return True if some host had the capacity to run the task and the task was sent there; false otherwise
* @throws Exception If there is a problem during task scheduling
*/
public boolean kickOnExistingHosts(Job job, JobTask task, String config, long timeOnQueue, boolean allowSwap, boolean ignoreQuiesce) throws Exception {
if (job == null || !job.isEnabled() ||
(job.getMaxSimulRunning() > 0 && job.getCountActiveTasks() >= job.getMaxSimulRunning())) {
return false;
}
boolean isNewTask = isNewTask(task);
List<HostState> unavailableHosts = hostsBlockingTaskKick(task);
if (isNewTask && !unavailableHosts.isEmpty()) {
boolean changed = replaceDownHosts(task);
if (changed) {
return false; // Reconsider the task the next time through the queue
}
}
if (!unavailableHosts.isEmpty()) {
log.warn("[taskQueuesByPriority] cannot kick " + task.getJobKey() + " because one or more of its hosts is down or scheduled to be failed: " + unavailableHosts.toString());
return false;
}
HostState liveHost = getHostState(task.getHostUUID());
if (liveHost.canMirrorTasks() && !liveHost.isReadOnly() && taskQueuesByPriority.shouldKickTaskOnHost(liveHost.getHostUuid())) {
taskQueuesByPriority.markHostKick(liveHost.getHostUuid(), false);
scheduleTask(job, task, config);
log.info("[taskQueuesByPriority] sending " + task.getJobKey() + " to " + task.getHostUUID());
return true;
} else if (allowSwap && !job.getDontAutoBalanceMe()) {
attemptKickTaskUsingSwap(job, task, isNewTask, ignoreQuiesce, timeOnQueue);
}
return false;
}
/**
* Attempt to kick a task under the assumption that the live host is unavailable.
*
* @param job The job to kick
* @param task The task to kick
* @param isNewTask Whether the task is new and has no existing data to move
* @param ignoreQuiesce Whether this task kick should ignore the quiesce state
* @param timeOnQueue How long the task has been on the queue
* @return True if the task was kicked
     * @throws Exception If there is a problem during task scheduling
*/
private boolean attemptKickTaskUsingSwap(Job job, JobTask task, boolean isNewTask, boolean ignoreQuiesce, long timeOnQueue) throws Exception {
if (isNewTask) {
HostState host = findHostWithAvailableSlot(task, listHostStatus(job.getMinionType()), false);
if (host != null && swapTask(job.getId(), task.getTaskID(), host.getHostUuid(), true, ignoreQuiesce)) {
taskQueuesByPriority.markHostKick(host.getHostUuid(), false);
log.info("[taskQueuesByPriority] swapping " + task.getJobKey() + " onto " + host.getHostUuid());
return true;
}
return false;
} else if (task.getReplicas() != null) {
List<HostState> replicaHosts = new ArrayList<>();
for (JobTaskReplica replica : task.getReplicas()) {
replicaHosts.add(getHostState(replica.getHostUUID()));
}
HostState availReplica = findHostWithAvailableSlot(task, replicaHosts, false);
if (availReplica != null && swapTask(job.getId(), task.getTaskID(), availReplica.getHostUuid(), true, ignoreQuiesce)) {
taskQueuesByPriority.markHostKick(availReplica.getHostUuid(), true);
log.info("[taskQueuesByPriority] swapping " + task.getJobKey() + " onto " + availReplica.getHostUuid());
return true;
}
}
if (taskQueuesByPriority.isMigrationEnabled() && !job.getQueryConfig().getCanQuery() && !job.getDontAutoBalanceMe()) {
return attemptMigrateTask(job, task, timeOnQueue);
}
return false;
}
/**
* Select a host that can run a task
*
* @param task The task being moved
* @param hosts A collection of hosts
* @param forMigration Whether the host in question is being used for migration
* @return A suitable host that has an available task slot, if one exists; otherwise, null
*/
private HostState findHostWithAvailableSlot(JobTask task, List<HostState> hosts, boolean forMigration) {
if (hosts == null) {
return null;
}
for (HostState host : hosts) {
if (host == null || (forMigration && hostFailWorker.getFailureState(host.getHostUuid()) != HostFailWorker.FailState.ALIVE)) {
// Don't migrate onto hosts that are being failed in any capacity
continue;
}
if (host.canMirrorTasks() && !host.isReadOnly() && balancer.canReceiveNewTasks(host, false) &&
taskQueuesByPriority.shouldKickTaskOnHost(host.getHostUuid()) &&
(!forMigration || taskQueuesByPriority.shouldMigrateTaskToHost(task, host.getHostUuid()))) {
return host;
}
}
return null;
}
/**
     * Consider migrating a task to a new host and running it there, subject to limits on how many such migrations
     * may happen per time interval and on how many bytes may be migrated.
*
* @param job The job for the task to kick
* @param task The task to kick
* @param timeOnQueue How long the task has been on the queue
* @return True if the task was migrated
*/
private boolean attemptMigrateTask(Job job, JobTask task, long timeOnQueue) {
HostState target;
if (
!quiesce && // If spawn is not quiesced,
taskQueuesByPriority.checkSizeAgeForMigration(task.getByteCount(), timeOnQueue) &&
// and the task is small enough that migration is sensible
(target = findHostWithAvailableSlot(task, listHostStatus(job.getMinionType()), true)) != null)
// and there is a host with available capacity that can run the job,
{
// Migrate the task to the target host and kick it on completion
log.warn("Migrating " + task.getJobKey() + " to " + target.getHostUuid());
taskQueuesByPriority.markMigrationBetweenHosts(task.getHostUUID(), target.getHostUuid());
taskQueuesByPriority.markHostKick(target.getHostUuid(), true);
TaskMover tm = new TaskMover(this, task.getJobKey(), target.getHostUuid(), task.getHostUUID(), true);
tm.setMigration(true);
tm.execute(true);
return true;
}
return false;
}
protected boolean isNewTask(JobTask task) {
HostState liveHost = getHostState(task.getHostUUID());
return liveHost != null && !liveHost.hasLive(task.getJobKey()) && task.getFileCount() == 0 && task.getByteCount() == 0;
}
/**
* Add a jobkey to the appropriate task queue, given its priority
*
* @param jobKey The jobkey to add
* @param ignoreQuiesce Whether the task can kick regardless of Spawn's quiesce state
*/
public void addToTaskQueue(JobKey jobKey, boolean ignoreQuiesce, boolean toHead) {
Job job = getJob(jobKey.getJobUuid());
JobTask task = getTask(jobKey.getJobUuid(), jobKey.getNodeNumber());
if (job != null && task != null) {
if (balancer.hasFullDiskHost(task)) {
log.warn("[task.queue] task " + task.getJobKey() + " cannot run because one of its hosts has a full disk");
job.setTaskState(task, JobTaskState.DISK_FULL);
queueJobTaskUpdateEvent(job);
} else if (task.getState() == JobTaskState.QUEUED || job.setTaskState(task, JobTaskState.QUEUED)) {
log.info("[taskQueuesByPriority] adding " + jobKey + " to queue with ignoreQuiesce=" + ignoreQuiesce);
taskQueuesByPriority.addTaskToQueue(job.getPriority(), jobKey, ignoreQuiesce, toHead);
queueJobTaskUpdateEvent(job);
sendTaskQueueUpdateEvent();
} else {
log.warn("[task.queue] failed to add task " + jobKey + " with state " + task.getState());
}
}
}
/**
* Iterate over each queue looking for jobs that can run. By design, the queues are processed in descending order
* of priority, so we try priority 2 tasks before priority 1, etc.
*/
public void kickJobsOnQueue() {
LinkedList[] queues = null;
boolean success = false;
while (!success && !shuttingDown.get()) {
// need the job lock first
jobLock.lock();
try {
if (taskQueuesByPriority.tryLock()) {
success = true;
taskQueuesByPriority.setStoppedJob(false);
taskQueuesByPriority.updateAllHostAvailSlots(listHostStatus(null));
queues = taskQueuesByPriority.values().toArray(new LinkedList[taskQueuesByPriority.size()]);
for (LinkedList<SpawnQueueItem> queue : queues) {
iterateThroughTaskQueue(queue);
}
sendTaskQueueUpdateEvent();
}
} finally {
jobLock.unlock();
if (success) {
taskQueuesByPriority.unlock();
}
}
if (!success) {
try {
Thread.sleep(100);
                } catch (InterruptedException e) {
                    // Ignore the interrupt and retry acquiring the locks on the next loop iteration.
                }
}
}
}
/**
* Iterate over a particular queue of same-priority tasks, kicking any that can run.
     * Must be called from inside a block synchronized on the queue.
*
* @param queue The queue to look over
*/
private void iterateThroughTaskQueue(LinkedList<SpawnQueueItem> queue) {
ListIterator<SpawnQueueItem> iter = queue.listIterator(0);
int skippedQuiesceCount = 0;
long now = System.currentTimeMillis();
while (iter.hasNext() && !taskQueuesByPriority.getStoppedJob()) // Terminate if out of tasks or we stopped a job, requiring a queue modification
{
SpawnQueueItem key = iter.next();
Job job = getJob(key.getJobUuid());
JobTask task = getTask(key.getJobUuid(), key.getNodeNumber());
try {
boolean kicked;
if (job == null || task == null || task.getState() != JobTaskState.QUEUED) {
log.warn("[task.queue] removing invalid task " + key);
iter.remove();
continue;
}
if (quiesce && !key.getIgnoreQuiesce()) {
skippedQuiesceCount++;
if (log.isDebugEnabled()) {
log.debug("[task.queue] skipping " + key + " because spawn is quiesced and the kick wasn't manual");
}
continue;
} else {
kicked = kickOnExistingHosts(job, task, null, now - key.getCreationTime(), true, key.getIgnoreQuiesce());
}
if (kicked) {
log.info("[task.queue] removing kicked task " + task.getJobKey());
iter.remove();
}
} catch (Exception ex) {
log.warn("[task.queue] received exception during task kick: ", ex);
if (task != null && job != null) {
job.errorTask(task, JobTaskErrorCode.KICK_ERROR);
iter.remove();
queueJobTaskUpdateEvent(job);
}
}
}
if (skippedQuiesceCount > 0) {
log.warn("[task.queue] skipped " + skippedQuiesceCount + " queued tasks because spawn is quiesced and the kick wasn't manual");
}
}
/**
* browser polling event listener
*/
public static class ClientEventListener {
public long lastSeen;
public LinkedBlockingQueue<ClientEvent> events = new LinkedBlockingQueue<ClientEvent>();
}
/**
* event queued to a browser ClientListener
*/
public static final class ClientEvent implements Codec.Codable {
private String topic;
private JSONObject message;
public ClientEvent(String topic, JSONObject message) {
this.topic = topic;
this.message = message;
}
public String topic() {
return topic;
}
public JSONObject message() {
return message;
}
public JSONObject toJSON() throws Exception {
return new JSONObject().put("topic", topic).put("message", message);
}
@Override
public boolean equals(Object o) {
if (o instanceof ClientEvent) {
ClientEvent ce = (ClientEvent) o;
                return ce.topic.equals(topic) && ce.message == message;
}
return false;
}
@Override
public int hashCode() {
return message.hashCode();
}
}
public WebSocketManager getWebSocketManager() {
return this.webSocketManager;
}
public boolean areAlertsEnabled() {
String alertsEnabled = null;
try {
alertsEnabled = spawnDataStore.get(SPAWN_COMMON_ALERT_PATH);
} catch (Exception ex) {
log.warn("Unable to read alerts status due to : " + ex.getMessage());
}
return alertsEnabled == null || alertsEnabled.equals("") || alertsEnabled.equals("true");
}
public void disableAlerts() throws Exception {
spawnDataStore.put(SPAWN_COMMON_ALERT_PATH, "false");
this.jobAlertRunner.disableAlerts();
}
public void enableAlerts() throws Exception {
spawnDataStore.put(SPAWN_COMMON_ALERT_PATH, "true");
this.jobAlertRunner.enableAlerts();
}
public List<String> getJobsToAutobalance() {
List<String> rv = new ArrayList<String>();
List<Job> autobalanceJobs = balancer.getJobsToAutobalance(listHostStatus(null));
if (autobalanceJobs == null) {
return rv;
}
for (Job job : autobalanceJobs) {
if (job.getId() != null) {
rv.add(job.getId());
}
}
return rv;
}
public long getTaskTrueSize(String jobId, int node) {
return balancer.getTaskTrueSize(getTask(jobId, node));
}
public void toggleHosts(String hosts, boolean disable) {
if (hosts != null) {
String[] hostsArray = hosts.split(",");
for (String host : hostsArray) {
if (host.isEmpty()) {
continue;
}
boolean changed;
synchronized (disabledHosts) {
changed = disable ? disabledHosts.add(host) : disabledHosts.remove(host);
}
if (changed) {
updateToggledHosts(host, disable);
}
}
writeState();
}
}
public void updateToggledHosts(String id, boolean disable) {
for (HostState host : listHostStatus(null)) {
if (id.equals(host.getHost()) || id.equals(host.getHostUuid())) {
host.setDisabled(disable);
sendHostUpdateEvent(host);
updateHostState(host);
}
}
}
/**
* simple settings wrapper allows changes to Spawn
*/
public class Settings {
public String getDebug() {
return debug;
}
public void setDebug(String debug) {
Spawn.this.debug = debug;
writeState();
}
public String getQueryHost() {
return queryHost;
}
public String getSpawnHost() {
return spawnHost;
}
public void setQueryHost(String queryHost) {
Spawn.this.queryHost = queryHost;
writeState();
}
public void setSpawnHost(String spawnHost) {
Spawn.this.spawnHost = spawnHost;
writeState();
}
public boolean getQuiesced() {
return quiesce;
}
public void setQuiesced(boolean quiesced) {
quiesceCount.clear();
if (quiesced) {
quiesceCount.inc();
}
Spawn.this.quiesce = quiesced;
writeState();
}
public String getDisabled() {
synchronized (disabledHosts) {
return Strings.join(disabledHosts.toArray(), ",");
}
}
public void setDisabled(String disabled) {
synchronized (disabledHosts) {
disabledHosts.clear();
disabledHosts.addAll(Arrays.asList(disabled.split(",")));
}
}
public JSONObject toJSON() throws JSONException {
return new JSONObject().put("debug", debug).put("quiesce", quiesce).put("queryHost", queryHost).put("spawnHost", spawnHost).put("disabled", getDisabled());
}
}
public void updateSpawnBalancerConfig(SpawnBalancerConfig newConfig) {
spawnState.balancerConfig = newConfig;
balancer.setConfig(newConfig);
}
public void writeSpawnBalancerConfig() {
try {
spawnDataStore.put(SPAWN_BALANCE_PARAM_PATH, new String(codec.encode(spawnState.balancerConfig)));
} catch (Exception e) {
log.warn("Warning: failed to persist SpawnBalancer parameters: ", e);
}
}
protected final void loadSpawnBalancerConfig() {
String configString = spawnDataStore.get(SPAWN_BALANCE_PARAM_PATH);
if (configString != null && !configString.isEmpty()) {
SpawnBalancerConfig loadedConfig = new SpawnBalancerConfig();
try {
codec.decode(loadedConfig, configString.getBytes());
updateSpawnBalancerConfig(loadedConfig);
} catch (Exception e) {
log.warn("Warning: failed to decode SpawnBalancerConfig: ", e);
}
}
}
public SpawnState getSpawnState() {
return spawnState;
}
public SpawnDataStore getSpawnDataStore() {
return spawnDataStore;
}
@VisibleForTesting
protected static class SpawnState implements Codec.Codable {
final ConcurrentMap<String, JobMacro> macros = new ConcurrentHashMapV8<>();
final ConcurrentMap<String, JobCommand> commands = new ConcurrentHashMapV8<>();
final ConcurrentMap<String, Job> jobs = new ConcurrentHashMapV8<>();
final DirectedGraph<String> jobDependencies = new DirectedGraph();
SpawnBalancerConfig balancerConfig = new SpawnBalancerConfig();
}
}
| Clear the target for stopped migrating task.
| hydra-main/src/main/java/com/addthis/hydra/job/Spawn.java | Clear the target for stopped migrating task. | <ide><path>ydra-main/src/main/java/com/addthis/hydra/job/Spawn.java
<ide> }
<ide> if (task.getState() == JobTaskState.QUEUED) {
<ide> removeFromQueue(task);
<del> log.warn("[taskQueuesByPriority] queued job " + jobUUID);
<add> log.warn("[task.stop] stopping queued " + task.getJobKey());
<ide> } else if (task.getState() == JobTaskState.REBALANCE) {
<del> log.warn("[task.stop] " + task.getJobKey() + " rebalance stopped with force=" + force);
<del> } else if (force && (task.getState() == JobTaskState.REVERT)) {
<add> log.warn("[task.stop] stopping rebalancing " + task.getJobKey() + " with force=" + force);
<add> } else if (task.getState() == JobTaskState.MIGRATING ) {
<add> log.warn("[task.stop] stopping migrating " + task.getJobKey());
<add> task.setRebalanceSource(null);
<add> task.setRebalanceTarget(null);
<add> }
<add> else if (force && (task.getState() == JobTaskState.REVERT)) {
<ide> log.warn("[task.stop] " + task.getJobKey() + " killed in state " + task.getState());
<ide> int code = JobTaskErrorCode.EXIT_REVERT_FAILURE;
<ide> job.errorTask(task, code); |
|
Java | mit | a1d0b27e570738f62a28a6054670dc6de53efc5c | 0 | juradoz/jdial,juradoz/jdial | package al.jdi.dao.beans;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.hibernate.criterion.Restrictions.eq;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.inject.Inject;
import org.hibernate.Query;
import org.hibernate.Session;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.slf4j.Logger;
import al.jdi.dao.model.Campanha;
import al.jdi.dao.model.Cliente;
import al.jdi.dao.model.EstadoCliente;
import al.jdi.dao.model.Filtro;
import al.jdi.dao.model.Mailing;
class DefaultClienteDao implements ClienteDao {
@Inject
private Logger logger;
private final DefaultDao<Cliente> dao;
DefaultClienteDao(Session session) {
this.dao = new DefaultDao<>(session, Cliente.class);
}
Collection<Integer> getFiltroAsInt(Campanha campanha) {
Collection<Integer> result = new LinkedList<Integer>();
for (Filtro filtro : campanha.getFiltro())
result.add(filtro.getCodigo());
return result;
}
boolean isFiltroExclusivo(Campanha campanha) {
if (!campanha.isFiltroAtivo())
return false;
for (Filtro filtro : campanha.getFiltro())
if (filtro.getMailing().isEmpty())
return false;
return true;
}
@Override
public void limpaReserva(Cliente cliente, int operadorDiscador, String nomeBaseDados) {
EstadoCliente estadoClienteAtivo =
new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
EstadoCliente estadoClienteReservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Discador");
cliente = procura(cliente.getId());
if (!cliente.getEstadoCliente().equals(estadoClienteReservado))
return;
cliente.setEstadoCliente(estadoClienteAtivo);
atualiza(cliente);
}
@Override
public void limpaReservas(Campanha campanha, String nomeBaseDados, String nomeBase, int operador) {
EstadoCliente ativo = new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
EstadoCliente reservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Discador");
dao.getSession()
.createSQLQuery(
"update Cliente c " + "inner join Mailing m on c.idMailing = m.idMailing "
+ "set c.idEstadoCliente = :ativo, " + "c.ultimaMudancaEstado = Now() "
+ "where c.idEstadoCliente = :reservado " + "and m.idCampanha = :campanha")
.setLong("ativo", ativo.getId()).setLong("reservado", reservado.getId())
.setLong("campanha", campanha.getId()).executeUpdate();
}
@Override
public int limpezaTemporaria(Campanha campanha, String nomeBaseDados, String nomeBase) {
return 0;
}
@Override
@SuppressWarnings("unchecked")
public Collection<Cliente> listaTudo(Campanha campanha, int maxResults) {
return dao.getSession().createCriteria(Cliente.class).setMaxResults(maxResults)
.createAlias("mailing", "m").add(eq("m.campanha", campanha)).list();
}
@Override
public Collection<Cliente> obtemAGGs(int quantidade, Campanha campanha, String nomeBaseDados,
String nomeBase, int operadorDiscador) {
List<Integer> idMailings = obtemIdMailings(campanha);
if (idMailings.isEmpty() && !possuiFiltro(campanha))
return Collections.<Cliente>emptyList();
String hql =
"select distinct Cliente.idCliente from Cliente "
+ " inner join InformacaoCliente on Cliente.idCliente = InformacaoCliente.idCliente"
+ " inner join Operador.DetCampanha on InformacaoCliente.chave = Operador.DetCampanha.CodDetCamp "
+ " inner join Agendamento on Cliente.idCliente = Agendamento.idCliente "
+ " left join Operador.FiltrosDet on Cliente.idCliente = Operador.FiltrosDet.idCliente "
+ "Where "
+ " (Cliente.disponivelAPartirDe is null or Cliente.disponivelAPartirDe <= Now()) "
+ " And Cliente.idEstadoCliente = 1 " + " And InformacaoCliente.nomeBase = '' "
+ " And Agendamento.agendamento <= Now() " + " And Agendamento.idAgente is null "
+ " And Operador.DetCampanha.OperadorCtt in (0, 3) "
+ " And Operador.DetCampanha.Situacao in (0, 1, 8) " + "%s "
+ "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
hql =
String.format(hql, !possuiFiltro(campanha) ? "And Cliente.idMailing in (:idMailings) "
: "And Operador.FiltrosDet.Filtro = :codigoFiltro ");
Query query = getSession().createSQLQuery(hql).setInteger("limit", quantidade);
if (!possuiFiltro(campanha))
query = query.setParameterList("idMailings", idMailings);
else
query = query.setInteger("codigoFiltro", campanha.getCodigoFiltro());
DateTime inicio = new DateTime();
LinkedList<Cliente> result = new LinkedList<Cliente>();
for (Object idCliente : query.list())
result.add(procura(((Integer) idCliente).longValue()));
logger.info("obtemAgendados demorou {}ms", new Duration(inicio, new DateTime()).getMillis());
return result;
}
@SuppressWarnings("unchecked")
List<Integer> obtemIdMailings(Campanha campanha) {
String hql =
isFiltroExclusivo(campanha) ? "select Filtro_Mailing.idMailing from Filtro inner join Filtro_Mailing on Filtro.idFiltro = Filtro_Mailing.idFiltro inner join Mailing on Filtro_Mailing.idMailing = Mailing.idMailing "
+ "where Filtro.idCampanha = :idCampanha and "
+ "Mailing.ativo = 1 and "
+ "(Mailing.dataInicial is null or Mailing.dataInicial <= Now()) and (Mailing.dataFinal is null or Mailing.dataFinal >= Now())"
: "select Mailing.idMailing from Mailing "
+ "where idCampanha = :idCampanha and "
+ "Mailing.ativo = 1 and (Mailing.dataInicial is null or Mailing.dataInicial <= Now()) and "
+ "(Mailing.dataFinal is null or Mailing.dataFinal >= Now())";
Query query = dao.getSession().createSQLQuery(hql);
query.setLong("idCampanha", campanha.getId());
List<Integer> idMailings = query.list();
return idMailings;
}
@Override
public Collection<Cliente> obtemLivres(int quantidade, Campanha campanha, String nomeBaseDados,
String nomeBase, int operadorDiscador) {
List<Integer> idMailings = obtemIdMailings(campanha);
if (idMailings.isEmpty() && !possuiFiltro(campanha))
return Collections.<Cliente>emptyList();
String hql =
"select distinct Cliente.idCliente from Cliente "
+ "inner join InformacaoCliente on Cliente.idCliente = InformacaoCliente.idCliente "
+ "left join Agendamento on Cliente.idCliente = Agendamento.idCliente "
+ "inner join Operador.DetCampanha on InformacaoCliente.chave = Operador.DetCampanha.CodDetCamp "
+ "left join Operador.FiltrosDet on Cliente.idCliente = Operador.FiltrosDet.idCliente "
+ "Where " + "Agendamento.idAgendamento is null "
+ "And (Cliente.disponivelAPartirDe is null or Cliente.disponivelAPartirDe <= Now()) "
+ "And Cliente.idEstadoCliente = 1 "
+ "And Operador.DetCampanha.OperadorCtt in (0, 3) "
+ "And Operador.DetCampanha.Situacao <= 1 " + "%s "
+ "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
hql =
String.format(hql, !possuiFiltro(campanha) ? "And Cliente.idMailing in (:idMailings) "
: "And Operador.FiltrosDet.Filtro = :codigoFiltro ");
Query query = getSession().createSQLQuery(hql).setInteger("limit", quantidade);
if (!possuiFiltro(campanha))
query = query.setParameterList("idMailings", idMailings);
else
query = query.setInteger("codigoFiltro", campanha.getCodigoFiltro());
DateTime inicio = new DateTime();
LinkedList<Cliente> result = new LinkedList<Cliente>();
for (Object idCliente : query.list())
result.add(procura(((Integer) idCliente).longValue()));
logger.info("obtemLivres demorou {}ms", new Duration(inicio, new DateTime()).getMillis());
return result;
}
boolean possuiFiltro(Campanha campanha) {
return campanha.isFiltroAtivo() && !getFiltroAsInt(campanha).isEmpty();
}
@Override
public Cliente procura(Mailing mailing, String chave) {
return (Cliente) dao.getSession().createCriteria(Cliente.class).add(eq("mailing", mailing))
.createCriteria("informacaoCliente").add(eq("chave", chave)).uniqueResult();
}
@Override
public void retornaReservadosOperador(Campanha campanha) {
EstadoCliente ativo = new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
if (ativo == null)
return;
EstadoCliente reservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Operador");
if (reservado == null)
return;
DateTime limite = new DateTime().minusHours(2);
dao.getSession()
.createSQLQuery(
"update Cliente c " + "inner join Telefone t on t.idCliente = c.idCliente "
+ "inner join Mailing m on c.idMailing = m.idMailing "
+ "set c.idEstadoCliente = :idEstadoClienteAtivo, "
+ "c.ultimaMudancaEstado = Now() " + "where " + "m.idCampanha = :idCampanha "
+ "and c.idEstadoCliente = :idEstadoClienteReservadoOperador "
+ "and c.ultimaMudancaEstado = :limiteReserva")
.setLong("idEstadoClienteAtivo", ativo.getId()).setLong("idCampanha", campanha.getId())
.setLong("idEstadoClienteReservadoOperador", reservado.getId())
.setTimestamp("limiteReserva", limite.toDate()).executeUpdate();
}
@Override
public String getDigitoSaida(Cliente cliente) {
return EMPTY;
}
@Override
public void adiciona(Cliente t) {
dao.adiciona(t);
}
@Override
public void atualiza(Cliente t) {
dao.atualiza(t);
}
@Override
public List<Cliente> listaTudo() {
return dao.listaTudo();
}
@Override
public Cliente procura(Long id) {
return dao.procura(id);
}
@Override
public void remove(Cliente u) {
dao.remove(u);
}
Session getSession() {
return dao.getSession();
}
@Override
public Cliente procura(String s) {
return dao.procura(s);
}
}
| jdial-dao/src/main/java/al/jdi/dao/beans/DefaultClienteDao.java | package al.jdi.dao.beans;
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.hibernate.criterion.Restrictions.eq;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.inject.Inject;
import org.hibernate.Query;
import org.hibernate.Session;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.slf4j.Logger;
import al.jdi.dao.model.Campanha;
import al.jdi.dao.model.Cliente;
import al.jdi.dao.model.EstadoCliente;
import al.jdi.dao.model.Filtro;
import al.jdi.dao.model.Mailing;
class DefaultClienteDao implements ClienteDao {
@Inject
private Logger logger;
private final DefaultDao<Cliente> dao;
DefaultClienteDao(Session session) {
this.dao = new DefaultDao<>(session, Cliente.class);
}
Collection<Integer> getFiltroAsInt(Campanha campanha) {
Collection<Integer> result = new LinkedList<Integer>();
for (Filtro filtro : campanha.getFiltro())
result.add(filtro.getCodigo());
return result;
}
boolean isFiltroExclusivo(Campanha campanha) {
if (!campanha.isFiltroAtivo())
return false;
for (Filtro filtro : campanha.getFiltro())
if (filtro.getMailing().isEmpty())
return false;
return true;
}
@Override
public void limpaReserva(Cliente cliente, int operadorDiscador, String nomeBaseDados) {
EstadoCliente estadoClienteAtivo =
new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
EstadoCliente estadoClienteReservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Discador");
cliente = procura(cliente.getId());
if (!cliente.getEstadoCliente().equals(estadoClienteReservado))
return;
cliente.setEstadoCliente(estadoClienteAtivo);
atualiza(cliente);
}
@Override
public void limpaReservas(Campanha campanha, String nomeBaseDados, String nomeBase, int operador) {
EstadoCliente ativo = new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
EstadoCliente reservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Discador");
dao.getSession()
.createSQLQuery(
"update Cliente c " + "inner join Mailing m on c.idMailing = m.idMailing "
+ "set c.idEstadoCliente = :ativo, " + "c.ultimaMudancaEstado = Now() "
+ "where c.idEstadoCliente = :reservado " + "and m.idCampanha = :campanha")
.setLong("ativo", ativo.getId()).setLong("reservado", reservado.getId())
.setLong("campanha", campanha.getId()).executeUpdate();
}
@Override
public int limpezaTemporaria(Campanha campanha, String nomeBaseDados, String nomeBase) {
return 0;
}
@Override
@SuppressWarnings("unchecked")
public Collection<Cliente> listaTudo(Campanha campanha, int maxResults) {
return dao.getSession().createCriteria(Cliente.class).setMaxResults(maxResults)
.createAlias("mailing", "m").add(eq("m.campanha", campanha)).list();
}
@Override
public Collection<Cliente> obtemAGGs(int quantidade, Campanha campanha, String nomeBaseDados,
String nomeBase, int operadorDiscador) {
List<Integer> idMailings = obtemIdMailings(campanha);
if (idMailings.isEmpty() && !possuiFiltro(campanha))
return Collections.<Cliente>emptyList();
String hql =
"select distinct Cliente.idCliente from Cliente "
+ " inner join InformacaoCliente on Cliente.idCliente = InformacaoCliente.idCliente"
+ " inner join Operador.DetCampanha on InformacaoCliente.chave = Operador.DetCampanha.CodDetCamp "
+ " inner join Agendamento on Cliente.idCliente = Agendamento.idCliente "
+ " left join Operador.FiltrosDet on Cliente.idCliente = Operador.FiltrosDet.idCliente "
+ "Where "
+ " (Cliente.disponivelAPartirDe is null or Cliente.disponivelAPartirDe <= Now()) "
+ " And Cliente.idEstadoCliente = 1 " + " And InformacaoCliente.nomeBase = '' "
+ " And Agendamento.agendamento <= Now() " + " And Agendamento.idAgente is null "
+ " And Operador.DetCampanha.OperadorCtt in (0, 3) "
+ " And Operador.DetCampanha.Situacao in (0, 1, 8) " + "%s " // And Cliente.idMailing
// in (:idMailings) -- Sem
// filtro
// And
// Operador.FiltrosDet.Filtro
// = :codigoFiltro --
// Comfiltro
+ "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
hql =
String.format(hql, !possuiFiltro(campanha) ? "And Cliente.idMailing in (:idMailings) "
: "And Operador.FiltrosDet.Filtro = :codigoFiltro ");
Query query = getSession().createSQLQuery(hql).setInteger("limit", quantidade);
if (!possuiFiltro(campanha))
query = query.setParameterList("idMailings", idMailings);
else
query = query.setInteger("codigoFiltro", campanha.getCodigoFiltro());
DateTime inicio = new DateTime();
LinkedList<Cliente> result = new LinkedList<Cliente>();
for (Object idCliente : query.list())
result.add(procura(((Integer) idCliente).longValue()));
logger.info("obtemAgendados demorou {}ms", new Duration(inicio, new DateTime()).getMillis());
return result;
}
@SuppressWarnings("unchecked")
List<Integer> obtemIdMailings(Campanha campanha) {
String hql =
isFiltroExclusivo(campanha) ? "select Filtro_Mailing.idMailing from Filtro inner join Filtro_Mailing on Filtro.idFiltro = Filtro_Mailing.idFiltro inner join Mailing on Filtro_Mailing.idMailing = Mailing.idMailing "
+ "where Filtro.idCampanha = :idCampanha and "
+ "Mailing.ativo = 1 and "
+ "(Mailing.dataInicial is null or Mailing.dataInicial <= Now()) and (Mailing.dataFinal is null or Mailing.dataFinal >= Now())"
: "select Mailing.idMailing from Mailing "
+ "where idCampanha = :idCampanha and "
+ "Mailing.ativo = 1 and (Mailing.dataInicial is null or Mailing.dataInicial <= Now()) and "
+ "(Mailing.dataFinal is null or Mailing.dataFinal >= Now())";
Query query = dao.getSession().createSQLQuery(hql);
query.setLong("idCampanha", campanha.getId());
List<Integer> idMailings = query.list();
return idMailings;
}
@Override
public Collection<Cliente> obtemLivres(int quantidade, Campanha campanha, String nomeBaseDados,
String nomeBase, int operadorDiscador) {
List<Integer> idMailings = obtemIdMailings(campanha);
if (idMailings.isEmpty() && !possuiFiltro(campanha))
return Collections.<Cliente>emptyList();
String hql =
"select distinct Cliente.idCliente from Cliente "
+ "inner join InformacaoCliente on Cliente.idCliente = InformacaoCliente.idCliente "
+ "left join Agendamento on Cliente.idCliente = Agendamento.idCliente "
+ "inner join Operador.DetCampanha on InformacaoCliente.chave = Operador.DetCampanha.CodDetCamp "
+ "left join Operador.FiltrosDet on Cliente.idCliente = Operador.FiltrosDet.idCliente "
+ "Where " + "Agendamento.idAgendamento is null "
+ "And (Cliente.disponivelAPartirDe is null or Cliente.disponivelAPartirDe <= Now()) "
+ "And Cliente.idEstadoCliente = 1 "
+ "And Operador.DetCampanha.OperadorCtt in (0, 3) "
+ "And Operador.DetCampanha.Situacao <= 1 " + "%s " // And Cliente.idMailing in
// (:idMailings) -- Sem filtro
// And Operador.FiltrosDet.Filtro =
// :codigoFiltro -- Com filtro
+ "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
hql =
String.format(hql, !possuiFiltro(campanha) ? "And Cliente.idMailing in (:idMailings) "
: "And Operador.FiltrosDet.Filtro = :codigoFiltro ");
Query query = getSession().createSQLQuery(hql).setInteger("limit", quantidade);
if (!possuiFiltro(campanha))
query = query.setParameterList("idMailings", idMailings);
else
query = query.setInteger("codigoFiltro", campanha.getCodigoFiltro());
DateTime inicio = new DateTime();
LinkedList<Cliente> result = new LinkedList<Cliente>();
for (Object idCliente : query.list())
result.add(procura(((Integer) idCliente).longValue()));
logger.info("obtemLivres demorou {}ms", new Duration(inicio, new DateTime()).getMillis());
return result;
}
boolean possuiFiltro(Campanha campanha) {
return campanha.isFiltroAtivo() && !getFiltroAsInt(campanha).isEmpty();
}
@Override
public Cliente procura(Mailing mailing, String chave) {
return (Cliente) dao.getSession().createCriteria(Cliente.class).add(eq("mailing", mailing))
.createCriteria("informacaoCliente").add(eq("chave", chave)).uniqueResult();
}
@Override
public void retornaReservadosOperador(Campanha campanha) {
EstadoCliente ativo = new DefaultEstadoClienteDao(dao.getSession()).procura("Ativo");
if (ativo == null)
return;
EstadoCliente reservado =
new DefaultEstadoClienteDao(dao.getSession()).procura("Reservado pelo Operador");
if (reservado == null)
return;
DateTime limite = new DateTime().minusHours(2);
dao.getSession()
.createSQLQuery(
"update Cliente c " + "inner join Telefone t on t.idCliente = c.idCliente "
+ "inner join Mailing m on c.idMailing = m.idMailing "
+ "set c.idEstadoCliente = :idEstadoClienteAtivo, "
+ "c.ultimaMudancaEstado = Now() " + "where " + "m.idCampanha = :idCampanha "
+ "and c.idEstadoCliente = :idEstadoClienteReservadoOperador "
+ "and c.ultimaMudancaEstado = :limiteReserva")
.setLong("idEstadoClienteAtivo", ativo.getId()).setLong("idCampanha", campanha.getId())
.setLong("idEstadoClienteReservadoOperador", reservado.getId())
.setTimestamp("limiteReserva", limite.toDate()).executeUpdate();
}
@Override
public String getDigitoSaida(Cliente cliente) {
return EMPTY;
}
@Override
public void adiciona(Cliente t) {
dao.adiciona(t);
}
@Override
public void atualiza(Cliente t) {
dao.atualiza(t);
}
@Override
public List<Cliente> listaTudo() {
return dao.listaTudo();
}
@Override
public Cliente procura(Long id) {
return dao.procura(id);
}
@Override
public void remove(Cliente u) {
dao.remove(u);
}
Session getSession() {
return dao.getSession();
}
@Override
public Cliente procura(String s) {
return dao.procura(s);
}
}
| Cleared dummy comments | jdial-dao/src/main/java/al/jdi/dao/beans/DefaultClienteDao.java | Cleared dummy comments | <ide><path>dial-dao/src/main/java/al/jdi/dao/beans/DefaultClienteDao.java
<ide> + " And Cliente.idEstadoCliente = 1 " + " And InformacaoCliente.nomeBase = '' "
<ide> + " And Agendamento.agendamento <= Now() " + " And Agendamento.idAgente is null "
<ide> + " And Operador.DetCampanha.OperadorCtt in (0, 3) "
<del> + " And Operador.DetCampanha.Situacao in (0, 1, 8) " + "%s " // And Cliente.idMailing
<del> // in (:idMailings) -- Sem
<del> // filtro
<del> // And
<del> // Operador.FiltrosDet.Filtro
<del> // = :codigoFiltro --
<del> // Comfiltro
<add> + " And Operador.DetCampanha.Situacao in (0, 1, 8) " + "%s "
<ide> + "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
<ide>
<ide> hql =
<ide> + "And (Cliente.disponivelAPartirDe is null or Cliente.disponivelAPartirDe <= Now()) "
<ide> + "And Cliente.idEstadoCliente = 1 "
<ide> + "And Operador.DetCampanha.OperadorCtt in (0, 3) "
<del> + "And Operador.DetCampanha.Situacao <= 1 " + "%s " // And Cliente.idMailing in
<del> // (:idMailings) -- Sem filtro
<del> // And Operador.FiltrosDet.Filtro =
<del> // :codigoFiltro -- Com filtro
<add> + "And Operador.DetCampanha.Situacao <= 1 " + "%s "
<ide> + "order by Cliente.ordemDaFila asc , Cliente.ordem asc " + "limit :limit";
<ide>
<ide> hql = |
|
Java | apache-2.0 | 146b522325cc95d7f1023110eb5e9bd432e038df | 0 | Alesandrus/aivanov,Alesandrus/aivanov,Alesandrus/aivanov | package ru.job4j.tree;
import java.util.ArrayList;
import java.util.List;
/**
* Class BinTree.
*
* @author Alexander Ivanov
* @since 10.04.2017
* @version 1.0
* @param <E> type of elements.
*/
public class BinTree<E extends Comparable> {
/**
* Node-root.
*/
private Node<E> root;
/**
* Put key to tree.
* Put only unique keys.
* @param key for adding to tree.
*/
public void put(E key) {
Node<E> nextNode = root;
Node<E> prevNode = null;
while (nextNode != null) {
int comp = key.compareTo(nextNode.key);
if (comp < 0) {
prevNode = nextNode;
nextNode = nextNode.left;
} else if (comp > 0) {
prevNode = nextNode;
nextNode = nextNode.right;
} else {
return;
}
}
Node<E> newNode = new Node<>(key);
if (prevNode == null) {
root = newNode;
} else {
if (key.compareTo(prevNode.key) < 0) {
prevNode.left = newNode;
} else {
prevNode.right = newNode;
}
}
}
/**
* Get all keys of tree.
* @return list of keys.
*/
public List<E> getAllKeys() {
List<E> list = new ArrayList<E>();
if (root != null) {
list.add(root.key);
getRecursiveList(root, list);
}
return list;
}
/**
     * Helper recursive method for collecting all keys.
* @param node parent.
* @param list for adding.
* @return list of keys.
*/
private List<E> getRecursiveList(Node<E> node, List<E> list) {
Node<E> rootNode = node;
if (rootNode.left != null) {
list.add(rootNode.left.key);
getRecursiveList(rootNode.left, list);
}
if (rootNode.right != null) {
list.add(rootNode.right.key);
getRecursiveList(rootNode.right, list);
}
return list;
}
/**
* Check tree for containing key.
* @param key for check.
* @return true if tree contains key.
*/
public boolean contains(E key) {
Node<E> node = root;
while (node != null) {
int comp = key.compareTo(node.key);
if (comp == 0) {
return true;
} else {
if (comp > 0) {
node = node.right;
} else {
node = node.left;
}
}
}
return false;
}
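    /*
     * Illustrative usage sketch (not part of the original class), assuming Integer keys:
     *
     *   BinTree<Integer> tree = new BinTree<>();
     *   tree.put(8);
     *   tree.put(4);
     *   tree.put(25);
     *   tree.put(4);                              // duplicate key, ignored by put()
     *   boolean found = tree.contains(25);        // true
     *   List<Integer> keys = tree.getAllKeys();   // pre-order traversal: [8, 4, 25]
     */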
/**
* Node for building tree.
* @param <E> type of key.
*/
private class Node<E> {
/**
* Key for storage.
*/
private E key;
/**
* Reference for left node.
*/
private Node<E> left;
/**
* Reference for right node.
*/
private Node<E> right;
/**
* Constructor for Node.
* @param key for storage.
*/
Node(E key) {
this.key = key;
}
}
}
| chapter_005_Collections_Pro/src/main/java/ru/job4j/tree/BinTree.java | package ru.job4j.tree;
import java.util.ArrayList;
import java.util.List;
/**
* Class BinTree.
*
* @author Alexander Ivanov
* @since 10.04.2017
* @version 1.0
* @param <E> type of elements.
*/
public class BinTree<E extends Comparable> {
/**
* Node-root.
*/
private Node<E> root;
/**
* Put key to tree.
* Put only unique keys.
* @param key for adding to tree.
*/
public void put(E key) {
Node<E> nextNode = root;
Node<E> prevNode = null;
while (nextNode != null) {
int comp = key.compareTo(nextNode.key);
if (comp < 0) {
prevNode = nextNode;
nextNode = nextNode.left;
} else if (comp > 0) {
prevNode = nextNode;
nextNode = nextNode.right;
} else {
return;
}
}
Node<E> newNode = new Node<>(key);
if (prevNode == null) {
root = newNode;
} else {
if (key.compareTo(prevNode.key) < 0) {
prevNode.left = newNode;
} else {
prevNode.right = newNode;
}
}
}
/**
* Get all keys of tree.
* @return list of keys.
*/
public List<E> getAllKeys() {
List<E> list = new ArrayList<E>();
if (root != null) {
list.add(root.key);
getRecursiveList(root, list);
}
return list;
}
/**
* Addition recursive method for getting all keys.
* @param node parent.
* @param list for adding.
* @return list of keys.
*/
private List<E> getRecursiveList(Node<E> node, List<E> list) {
Node<E> rootNode = node;
if (rootNode.left != null) {
list.add(rootNode.left.key);
getRecursiveList(rootNode.left, list);
}
if (rootNode.right != null) {
list.add(rootNode.right.key);
getRecursiveList(rootNode.right, list);
}
return list;
}
/**
* Check tree for containing key.
* @param key for check.
* @return true if tree contains key.
*/
public boolean contains(E key) {
Node<E> node = root;
while (node != null) {
int comp = key.compareTo(node.key);
if (comp == 0) {
return true;
} else {
if (comp > 0) {
node = node.right;
} else {
node = node.left;
}
}
}
return false;
}
/**
* Node for building tree.
* @param <E> type of key.
*/
private class Node<E> {
/**
* Key for storage.
*/
private E key;
/**
* Reference for left node.
*/
private Node<E> left;
/**
* Reference for right node.
*/
private Node<E> right;
/**
* Constructor for Node.
* @param key for storage.
*/
Node(E key) {
this.key = key;
}
}
/**
* PSVM.
* @param args of string.
*/
public static void main(String[] args) {
BinTree<Integer> tree = new BinTree<>();
tree.put(8);
tree.put(4);
tree.put(7);
tree.put(7);
tree.put(25);
System.out.println(tree.contains(25));
List<Integer> list = tree.getAllKeys();
for (Integer i : list) {
System.out.println(i);
}
}
}
| Add pictures.
| chapter_005_Collections_Pro/src/main/java/ru/job4j/tree/BinTree.java | Add pictures. | <ide><path>hapter_005_Collections_Pro/src/main/java/ru/job4j/tree/BinTree.java
<ide> this.key = key;
<ide> }
<ide> }
<del>
<del> /**
<del> * PSVM.
<del> * @param args of string.
<del> */
<del> public static void main(String[] args) {
<del> BinTree<Integer> tree = new BinTree<>();
<del> tree.put(8);
<del> tree.put(4);
<del> tree.put(7);
<del> tree.put(7);
<del> tree.put(25);
<del> System.out.println(tree.contains(25));
<del> List<Integer> list = tree.getAllKeys();
<del> for (Integer i : list) {
<del> System.out.println(i);
<del> }
<del> }
<ide> } |
|
Java | mit | bb542fe1423ea1ebe1652f6f18e3c94db698f73e | 0 | xingda920813/HelloCamera2 | package com.xdandroid.hellocamera2.util;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import com.xdandroid.hellocamera2.app.App;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class BitmapUtils {
/**
     * Crops the captured photo to the range of the viewfinder (the bright area).
     * For a 1280x720 screen the crop starts at (52, 80) and the resulting bitmap is 896x588 (computed from the layout xml).
     * These values are scaled proportionally to the actual size of the photo.
     *
     * @param originalBitmap Bitmap obtained from the camera
     * @return Cropped bitmap
*/
public static Bitmap crop(Bitmap originalBitmap) {
double originalWidth = originalBitmap.getWidth();
double originalHeight = originalBitmap.getHeight();
double scaleX = originalWidth / 1280;
scaleX = scaleX * 1.04;
double scaleY = originalHeight / 720;
int x = (int) (52 * scaleX + 0.5);
int y = (int) (80 * scaleY + 0.5);
int width = (int) (896 * scaleX + 0.5);
int height = (int) (588 * scaleY + 0.5);
Bitmap bitmap = Bitmap.createBitmap(originalBitmap, x, y, width, height);
originalBitmap.recycle();
return bitmap;
}
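    /*
     * Worked example (illustrative, not from the original source): for a 2560x1440 photo,
     * scaleX = 2560 / 1280 * 1.04 = 2.08 and scaleY = 1440 / 720 = 2, so the crop starts at
     * x = (int) (52 * 2.08 + 0.5) = 108, y = (int) (80 * 2 + 0.5) = 160 and the cropped
     * bitmap is 1864x1176 pixels.
     */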
/**
     * If the image's width is smaller than its height, rotates it 90° counter-clockwise; otherwise returns the original image.
     * Intended for photos taken with the system camera when the image should always be in landscape orientation.
     *
     * @param sourceBitmap Bitmap obtained from the camera
     * @return The bitmap rotated 90° counter-clockwise if its width is smaller than its height; otherwise the original bitmap.
*/
public static Bitmap rotate(Bitmap sourceBitmap) {
int sourceWidth = sourceBitmap.getWidth();
int sourceHeight = sourceBitmap.getHeight();
if (sourceWidth >= sourceHeight) return sourceBitmap;
int maxInWidthAndHeight = Math.max(sourceWidth, sourceHeight);
Bitmap destBitmap = Bitmap.createBitmap(maxInWidthAndHeight, maxInWidthAndHeight, Bitmap.Config.ARGB_8888);
Matrix m = new Matrix();
m.setRotate(-90, maxInWidthAndHeight / 2, maxInWidthAndHeight / 2);
Canvas canvas = new Canvas(destBitmap);
canvas.drawBitmap(sourceBitmap, m, new Paint());
sourceBitmap.recycle();
return destBitmap;
}
/**
     * Compresses an image file to the requested size and returns the bitmap.
     * If the original image is smaller than the requested size, the original image is returned.
     * This method does not handle image orientation; the file's aspect ratio should be close to the requested one.
     *
     * @param filePath  Image file
     * @param reqWidth  Width after compression
     * @param reqHeight Height after compression
     * @return Compressed bitmap. If the original image is smaller than the requested size, the bitmap decoded from the file is returned.
*/
public static Bitmap compress(File filePath, int reqWidth, int reqHeight) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath.toString(), options);
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
options.inJustDecodeBounds = false;
        //inSampleSize works by subsampling; it only saves memory once the bitmap is decoded. The bitmap still has just as many pixels and the file size is not reduced.
Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
inSampleSizedBitmap.recycle();
return scaledBitmap;
}
/**
     * Compresses an image file to the requested size and returns the bitmap.
     * If the original image is smaller than the requested size, the original image is returned.
     * This method takes image orientation into account, so reqWidth > reqHeight can always be passed.
     *
     * @param filePath  Image file
     * @param reqWidth  Width after compression
     * @param reqHeight Height after compression
     * @return Compressed bitmap. If the original image is smaller than the requested size, the bitmap decoded from the file is returned.
*/
@SuppressWarnings("SuspiciousNameCombination")
public static Bitmap compressConsideringRotation(File filePath, int reqWidth, int reqHeight) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath.toString(), options);
boolean shouldRotate = options.outWidth < options.outHeight;
if (shouldRotate) {
options.inSampleSize = calculateInSampleSize(options, reqHeight, reqWidth);
} else {
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
}
options.inJustDecodeBounds = false;
        //inSampleSize works by subsampling; it only saves memory once the bitmap is decoded. The bitmap still has just as many pixels and the file size is not reduced.
Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
inSampleSizedBitmap.recycle();
return scaledBitmap;
}
/**
     * Compresses an image file to the requested size and returns the bitmap.
     * If the original image is smaller than the requested size, the original image is returned.
     * This method derives inSampleSize from the resolution area, so image orientation does not matter.
     *
     * @param filePath        Image file
     * @param reqSquarePixels Target resolution area (width x height) in pixels
     * @return Compressed bitmap. If the original image is smaller than the requested size, the bitmap decoded from the file is returned.
*/
public static Bitmap compressToResolution(File filePath, long reqSquarePixels) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath.toString(), options);
options.inSampleSize = calculateInSampleSizeBySquare(options, reqSquarePixels);
options.inJustDecodeBounds = false;
        //inSampleSize works by subsampling; it only saves memory once the bitmap is decoded. The bitmap still has just as many pixels and the file size is not reduced.
Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
inSampleSizedBitmap.recycle();
return scaledBitmap;
}
/**
     * Computes the compression parameter BitmapFactory.Options.inSampleSize.
     * Handles the case where the aspect ratio in options differs from the requested one.
     * Derives inSampleSize from the resolution area, so image orientation does not matter.
     *
     * @param options         BitmapFactory.Options
     * @param reqSquarePixels Resolution area after compression, in pixels
     * @return The computed BitmapFactory.Options.inSampleSize
*/
private static int calculateInSampleSizeBySquare(BitmapFactory.Options options, long reqSquarePixels) {
long squarePixels = options.outWidth * options.outHeight;
if (squarePixels <= reqSquarePixels) return 1;
double powwedScale = ((double) squarePixels) / ((double) reqSquarePixels);
double scale = Math.sqrt(powwedScale);
double log = Math.log(scale) / Math.log(2);
double logCeil = Math.ceil(log);
return (int) Math.pow(2, logCeil);
}
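    /*
     * Worked example (illustrative, not from the original source): a 4000x3000 photo has
     * 12,000,000 pixels. With reqSquarePixels = 3,000,000 the ratio is 4, its square root is 2,
     * log2(2) = 1, ceil(1) = 1, so inSampleSize = 2^1 = 2 and the bitmap is decoded at 2000x1500 (3 MP).
     */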
/**
     * Computes the compression parameter BitmapFactory.Options.inSampleSize.
     * Handles the case where the aspect ratio in options differs from the requested aspect ratio.
     *
     * @param options   BitmapFactory.Options
     * @param reqWidth  Width after compression
     * @param reqHeight Height after compression
     * @return The computed BitmapFactory.Options.inSampleSize
*/
private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
int width = options.outWidth;
int height = options.outHeight;
double ratio = ((double) width) / ((double) height);
double reqRatio = ((double) reqWidth) / ((double) reqHeight);
/**
         * 16:9 -> 4:3  : 3840x2160 -> 1600x1200 : use the height ratio as the scale
         * 3:4  -> 9:16 : 2400x3200 -> 1080x1920 : use the height ratio as the scale
         * 9:16 -> 3:4  : 2160x3840 -> 1200x1600 : use the width ratio as the scale
         * 4:3  -> 16:9 : 3200x2400 -> 1920x1080 : use the width ratio as the scale
*/
boolean shouldUseWidthScale = ratio <= reqRatio;
if (width <= reqWidth && height <= reqHeight) return 1;
double scale = shouldUseWidthScale ? ((double) width) / ((double) reqWidth) : ((double) height) / ((double) reqHeight);
double log = Math.log(scale) / Math.log(2);
double logCeil = Math.ceil(log);
return (int) Math.pow(2, logCeil);
}
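    /*
     * Worked example (illustrative, not from the original source): for a 3200x2400 source and
     * req 1920x1080, ratio (1.33) <= reqRatio (1.78), so the width ratio is used:
     * scale = 3200 / 1920 = 1.67, log2(1.67) = 0.74, ceil = 1, inSampleSize = 2^1 = 2,
     * and the callers above decode (and rescale) the bitmap at 1600x1200.
     */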
/**
     * Convenience wrapper around BitmapFactory.decodeFile(String pathName, Options opts).
     *
     * @param filePath Image file
     * @return Bitmap decoded from the file.
*/
public static Bitmap decodeFile(File filePath) {
return BitmapFactory.decodeFile(filePath.toString(), null);
}
/**
     * Writes the bitmap to a file in the cache subdirectory of this app's package directory on external storage.
     *
     * @param bitmap   Bitmap to write
     * @param fileName File name
     * @return The File the bitmap was written to.
*/
public static File writeBitmapToFile(Bitmap bitmap, String fileName) {
String pathTo = App.app.getExternalCacheDir() + "/" + fileName;
File file = new File(pathTo);
FileOutputStream fos;
try {
if (file.exists()) file.delete();
fos = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
fos.flush();
bitmap.recycle();
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
return file;
}
}
| app/src/main/java/com/xdandroid/hellocamera2/util/BitmapUtils.java | package com.xdandroid.hellocamera2.util;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import com.xdandroid.hellocamera2.app.App;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class BitmapUtils {
/**
* 将拍照得到的图片按照取景框(亮色区域)的范围进行裁剪.
* 对于1280x720的屏幕,裁剪起始点为坐标(52, 80),裁剪后,位图尺寸为896x588.(由layout xml定义的布局计算得到)
* 以上参数将按照图片的实际大小,进行等比例换算。
* @param originalBitmap 拍照得到的Bitmap
* @return 裁剪之后的Bitmap
*/
public static Bitmap crop(Bitmap originalBitmap) {
double originalWidth = originalBitmap.getWidth();
double originalHeight = originalBitmap.getHeight();
double scaleX = originalWidth / 1280;
scaleX = scaleX * 1.04;
double scaleY = originalHeight / 720;
int x = (int) (52 * scaleX + 0.5);
int y = (int) (80 * scaleY + 0.5);
int width = (int) (896 * scaleX + 0.5);
int height = (int) (588 * scaleY + 0.5);
Bitmap bitmap = Bitmap.createBitmap(originalBitmap, x, y, width, height);
originalBitmap.recycle();
return bitmap;
}
/**
* 若图片宽小于高,则逆时针旋转90° ; 否则,返回原图片.
* 适用于调用系统相机进行拍照,且希望图片总是横向的场景。
* @param sourceBitmap 拍照得到的Bitmap
* @return 若图片宽小于高,返回逆时针旋转90°后的Bitmap ; 否则,返回原Bitmap.
*/
public static Bitmap rotate(Bitmap sourceBitmap) {
int sourceWidth = sourceBitmap.getWidth();
int sourceHeight = sourceBitmap.getHeight();
if (sourceWidth >= sourceHeight) return sourceBitmap;
int maxInWidthAndHeight = Math.max(sourceWidth, sourceHeight);
Bitmap destBitmap = Bitmap.createBitmap(maxInWidthAndHeight, maxInWidthAndHeight, Bitmap.Config.ARGB_8888);
Matrix m = new Matrix();
m.setRotate(-90, maxInWidthAndHeight / 2, maxInWidthAndHeight / 2);
Canvas canvas = new Canvas(destBitmap);
canvas.drawBitmap(sourceBitmap, m, new Paint());
sourceBitmap.recycle();
return destBitmap;
}
/**
* 将图片文件压缩到所需的大小,返回位图对象.
* 若原图尺寸小于需要压缩到的尺寸,则返回原图.
* @param filePath 图片File
* @param reqWidth 压缩后的宽度
* @param reqHeight 压缩后的高度
* @return 压缩后的Bitmap. 若原图尺寸小于需要压缩到的尺寸,则返回File解码得到的Bitmap.
*/
public static Bitmap compress(File filePath, int reqWidth, int reqHeight) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath.toString(), options);
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
options.inJustDecodeBounds = false;
//inSampleSize通过SubSampling实现,只是节省了读入Bitmap后占用的内存,Bitmap本身的像素还是那么多,文件体积不会减小。
Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
inSampleSizedBitmap.recycle();
return scaledBitmap;
}
/**
* 将图片文件压缩到所需的大小,返回位图对象.
* 若原图尺寸小于需要压缩到的尺寸,则返回原图.
* 该方法考虑了可能需要在压缩后进行旋转的情况,因此可放心传入reqWidth和reqHeight,而无需考虑图片方向.
* @param filePath 图片File
* @param reqWidth 压缩后的宽度
* @param reqHeight 压缩后的高度
* @return 压缩后的Bitmap. 若原图尺寸小于需要压缩到的尺寸,则返回File解码得到的Bitmap.
*/
@SuppressWarnings("SuspiciousNameCombination")
public static Bitmap compressConsideringRotation(File filePath, int reqWidth, int reqHeight) {
BitmapFactory.Options options = new BitmapFactory.Options();
options.inJustDecodeBounds = true;
BitmapFactory.decodeFile(filePath.toString(), options);
boolean shouldRotate = options.outWidth < options.outHeight;
if (shouldRotate) {
options.inSampleSize = calculateInSampleSize(options, reqHeight, reqWidth);
} else {
options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);
}
options.inJustDecodeBounds = false;
//inSampleSize通过SubSampling实现,只是节省了读入Bitmap后占用的内存,Bitmap本身的像素还是那么多,文件体积不会减小。
Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
inSampleSizedBitmap.recycle();
return scaledBitmap;
}
/**
* 计算压缩参数BitmapFactory.Options.inSampleSize
* @param options BitmapFactory.Options
* @param reqWidth 压缩后的宽度
* @param reqHeight 压缩后的高度
* @return 计算得到的BitmapFactory.Options.inSampleSize
*/
private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
int width = options.outWidth;
int height = options.outHeight;
if (width <= reqWidth && height <= reqHeight) {
return 1;
} else {
double scale = width >= height ? width / reqWidth : height / reqHeight;
double log = Math.log(scale) / Math.log(2);
double logCeil = Math.ceil(log);
return (int) Math.pow(2, logCeil);
}
}
/**
* BitmapFactory.decodeFile(String pathName, Options opts)的快捷方法.
* @param filePath 图片File
* @return 解码文件得到的Bitmap.
*/
public static Bitmap decodeFile(File filePath) {
return BitmapFactory.decodeFile(filePath.toString(), null);
}
/**
 * Writes a Bitmap to a file located in the cache subdirectory of this application's package directory on external storage.
 * @param bitmap the Bitmap to write
 * @param fileName the file name
 * @return the File corresponding to the written file.
*/
public static File writeBitmapToFile(Bitmap bitmap, String fileName) {
String pathTo = App.app.getExternalCacheDir() + "/" + fileName;
File file = new File(pathTo);
FileOutputStream fos;
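    // Overwrites any existing file, writes the bitmap as JPEG at quality 90 and recycles it;
    // on IOException the stack trace is printed and the (possibly incomplete) File is still returned.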
try {
if (file.exists()) file.delete();
fos = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.JPEG, 90, fos);
fos.flush();
bitmap.recycle();
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
return file;
}
}
| Adjusted the algorithm that finds the best resolution when compressing.
| app/src/main/java/com/xdandroid/hellocamera2/util/BitmapUtils.java | Adjusted the algorithm that finds the best resolution when compressing. | <ide><path>pp/src/main/java/com/xdandroid/hellocamera2/util/BitmapUtils.java
<ide> * 将拍照得到的图片按照取景框(亮色区域)的范围进行裁剪.
<ide> * 对于1280x720的屏幕,裁剪起始点为坐标(52, 80),裁剪后,位图尺寸为896x588.(由layout xml定义的布局计算得到)
<ide> * 以上参数将按照图片的实际大小,进行等比例换算。
<add> *
<ide> * @param originalBitmap 拍照得到的Bitmap
<ide> * @return 裁剪之后的Bitmap
<ide> */
<ide> /**
<ide> * 若图片宽小于高,则逆时针旋转90° ; 否则,返回原图片.
<ide> * 适用于调用系统相机进行拍照,且希望图片总是横向的场景。
<add> *
<ide> * @param sourceBitmap 拍照得到的Bitmap
<ide> * @return 若图片宽小于高,返回逆时针旋转90°后的Bitmap ; 否则,返回原Bitmap.
<ide> */
<ide> /**
<ide> * 将图片文件压缩到所需的大小,返回位图对象.
<ide> * 若原图尺寸小于需要压缩到的尺寸,则返回原图.
<del> * @param filePath 图片File
<del> * @param reqWidth 压缩后的宽度
<add> * 该方法不处理图片方向问题,需要确保File的长宽比与req中的比例相近.
<add> *
<add> * @param filePath 图片File
<add> * @param reqWidth 压缩后的宽度
<ide> * @param reqHeight 压缩后的高度
<ide> * @return 压缩后的Bitmap. 若原图尺寸小于需要压缩到的尺寸,则返回File解码得到的Bitmap.
<ide> */
<ide> /**
<ide> * 将图片文件压缩到所需的大小,返回位图对象.
<ide> * 若原图尺寸小于需要压缩到的尺寸,则返回原图.
<del> * 该方法考虑了可能需要在压缩后进行旋转的情况,因此可放心传入reqWidth和reqHeight,而无需考虑图片方向.
<del> * @param filePath 图片File
<del> * @param reqWidth 压缩后的宽度
<add> * 该方法考虑了图片方向问题,因此可始终传入reqWidth > reqHeight.
<add> *
<add> * @param filePath 图片File
<add> * @param reqWidth 压缩后的宽度
<ide> * @param reqHeight 压缩后的高度
<ide> * @return 压缩后的Bitmap. 若原图尺寸小于需要压缩到的尺寸,则返回File解码得到的Bitmap.
<ide> */
<ide> }
<ide>
<ide> /**
<del> * 计算压缩参数BitmapFactory.Options.inSampleSize
<del> * @param options BitmapFactory.Options
<del> * @param reqWidth 压缩后的宽度
<add> * 将图片文件压缩到所需的大小,返回位图对象.
<add> * 若原图尺寸小于需要压缩到的尺寸,则返回原图.
<add> * 该方法通过分辨率面积得到inSampleSize,因此不存在图片方向问题.
<add> *
<add> * @param filePath
<add> * @param reqSquarePixels
<add> * @return
<add> */
<add> public static Bitmap compressToResolution(File filePath, long reqSquarePixels) {
<add> BitmapFactory.Options options = new BitmapFactory.Options();
<add> options.inJustDecodeBounds = true;
<add> BitmapFactory.decodeFile(filePath.toString(), options);
<add> options.inSampleSize = calculateInSampleSizeBySquare(options, reqSquarePixels);
<add> options.inJustDecodeBounds = false;
<add> //inSampleSize通过SubSampling实现,只是节省了读入Bitmap后占用的内存,Bitmap本身的像素还是那么多,文件体积不会减小。
<add> Bitmap inSampleSizedBitmap = BitmapFactory.decodeFile(filePath.toString(), options);
<add> Bitmap scaledBitmap = Bitmap.createScaledBitmap(inSampleSizedBitmap, options.outWidth / options.inSampleSize, options.outHeight / options.inSampleSize, false);
<add> inSampleSizedBitmap.recycle();
<add> return scaledBitmap;
<add> }
<add>
<add> /**
<add> * 计算压缩参数BitmapFactory.Options.inSampleSize.
<add> * 考虑了options的长宽比可能与req中的比例不同的情况.
<add> * 该方法通过分辨率面积得到inSampleSize,因此不存在图片方向问题.
<add> *
<add> * @param options BitmapFactory.Options
<add> * @param reqSquarePixels 压缩后的分辨率面积
<add> * @return 计算得到的BitmapFactory.Options.inSampleSize
<add> */
<add> private static int calculateInSampleSizeBySquare(BitmapFactory.Options options, long reqSquarePixels) {
<add> long squarePixels = options.outWidth * options.outHeight;
<add> if (squarePixels <= reqSquarePixels) return 1;
<add> double powwedScale = ((double) squarePixels) / ((double) reqSquarePixels);
<add> double scale = Math.sqrt(powwedScale);
<add> double log = Math.log(scale) / Math.log(2);
<add> double logCeil = Math.ceil(log);
<add> return (int) Math.pow(2, logCeil);
<add> }
<add>
<add> /**
<add> * 计算压缩参数BitmapFactory.Options.inSampleSize.
<add> * 考虑了options中的长宽比可能与req的长宽比不同的情况.
<add> *
<add> * @param options BitmapFactory.Options
<add> * @param reqWidth 压缩后的宽度
<ide> * @param reqHeight 压缩后的高度
<ide> * @return 计算得到的BitmapFactory.Options.inSampleSize
<ide> */
<ide> private static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) {
<ide> int width = options.outWidth;
<ide> int height = options.outHeight;
<del> if (width <= reqWidth && height <= reqHeight) {
<del> return 1;
<del> } else {
<del> double scale = width >= height ? width / reqWidth : height / reqHeight;
<del> double log = Math.log(scale) / Math.log(2);
<del> double logCeil = Math.ceil(log);
<del> return (int) Math.pow(2, logCeil);
<del> }
<add> double ratio = ((double) width) / ((double) height);
<add> double reqRatio = ((double) reqWidth) / ((double) reqHeight);
<add> /**
<add> * 16:9 -> 4:3 : 3840x2160 -> 1600x1200 : 取height之比作为scale
<add> * 3:4 -> 9:16 : 2400x3200 -> 1080x1920 : 取height之比作为scale
<add> * 9:16 -> 3:4 : 2160x3840 -> 1200x1600 : 取width之比作为scale
<add> * 4:3 -> 16:9 : 3200x2400 -> 1920x1080 : 取width之比作为scale
<add> */
<add> boolean shouldUseWidthScale = ratio <= reqRatio;
<add> if (width <= reqWidth && height <= reqHeight) return 1;
<add> double scale = shouldUseWidthScale ? ((double) width) / ((double) reqWidth) : ((double) height) / ((double) reqHeight);
<add> double log = Math.log(scale) / Math.log(2);
<add> double logCeil = Math.ceil(log);
<add> return (int) Math.pow(2, logCeil);
<ide> }
<ide>
<ide> /**
<ide> * BitmapFactory.decodeFile(String pathName, Options opts)的快捷方法.
<add> *
<ide> * @param filePath 图片File
<ide> * @return 解码文件得到的Bitmap.
<ide> */
<ide>
<ide> /**
<ide> * 将Bitmap写入文件,文件位于外置存储上该应用包名目录的cache子目录中.
<del> * @param bitmap 要写入的Bitmap
<add> *
<add> * @param bitmap 要写入的Bitmap
<ide> * @param fileName 文件名
<ide> * @return 文件对应的File.
<ide> */ |
|
Java | apache-2.0 | e1aafbe34d0cb842cfe6ac78fe63bb49901eb79d | 0 | EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4,EBIBioSamples/biosamples-v4 | package uk.ac.ebi.biosamples;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.hateoas.Resource;
import org.springframework.stereotype.Component;
import uk.ac.ebi.biosamples.client.BioSamplesClient;
import uk.ac.ebi.biosamples.model.Attribute;
import uk.ac.ebi.biosamples.model.ExternalReference;
import uk.ac.ebi.biosamples.model.Sample;
import uk.ac.ebi.biosamples.ols.OlsProcessor;
import uk.ac.ebi.biosamples.ols.OlsResult;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;
@Component
public class EGAImportRunner implements ApplicationRunner {
private static final Logger LOG = LoggerFactory.getLogger(EGAImportRunner.class);
private static final String EGA_DATASET_BASE_URL = "https://ega-archive.org/datasets/";
private static final String EGA_SAMPLE_BASE_URL = "https://ega-archive.org/metadata/v2/samples/";
private static final Set<String> UNKNOWN_TERMS = new HashSet<>(Arrays.asList("n/a", "na", "n.a", "none",
"unknown", "--", ".", "null", "missing", "[not reported]", "[not requested]", "not applicable",
"not_applicable", "not collected", "not specified", "not known", "not reported", "missing: not provided"));
private static final String ATTRIBUTE_PHENOTYPE = "phenotype";
private static final String ATTRIBUTE_SEX = "sex";
private final Attribute organism;
private final BioSamplesClient bioSamplesClient;
private final OlsProcessor olsProcessor;
@Autowired
public EGAImportRunner(BioSamplesClient bioSamplesClient, OlsProcessor olsProcessor) {
this.bioSamplesClient = bioSamplesClient;
this.olsProcessor = olsProcessor;
organism = Attribute.build("organism", "Homo sapiens", "http://purl.obolibrary.org/obo/NCBITaxon_9606", null);
}
@Override
public void run(ApplicationArguments args) {
if (args.getSourceArgs().length < 1) {
LOG.error("Please specify a data folder as a program argument");
throw new IllegalArgumentException("Please specify a data folder as a program argument");
}
final String dataFolderUrl = args.getSourceArgs()[0];
final String datasetDuoUrl = dataFolderUrl + "datasets_duo.csv";
final String sampleDataUrl = dataFolderUrl + "sanger_released_samples.csv";
final String phenotypeIriFile = dataFolderUrl + "sanger_datasets_public_phenotype_hpo.csv";
Map<String, SortedSet<String>> datasetToDuoCodesMap = loadDuoCodeMap(datasetDuoUrl);
// Map<String, List<OlsResult>> phenotypeIriMap = loadPhenotypeIriMap(phenotypeIriFile);
Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();//todo remove this and uncomment above
try (BufferedReader br = new BufferedReader(new FileReader(sampleDataUrl))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", sampleDataUrl, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] sampleValues = line.split(",");
String accession = sampleValues[0];
String egaId = sampleValues[1];
String datasetId = sampleValues[2];
String phenotype = sampleValues[3];
String sex = sampleValues[4];
SortedSet<String> duoCodes = datasetToDuoCodesMap.get(datasetId);
List<OlsResult> phenotypeIris = phenotypeIriMap.get(phenotype);
processSampleRecord(accession, egaId, datasetId, phenotype, sex, duoCodes, phenotypeIris);
}
} catch (JsonProcessingException e) {
LOG.error("JSON conversion failure", e);
} catch (IOException e) {
LOG.error("Couldn't read file: " + datasetDuoUrl, e);
}
}
private void processSampleRecord(String accession, String egaId, String datasetId, String phenotype, String sex,
SortedSet<String> duoCodes, List<OlsResult> phenotypeIris)
throws JsonProcessingException {
final ObjectMapper jsonMapper = new ObjectMapper();
Optional<Resource<Sample>> sampleResource = bioSamplesClient.fetchSampleResource(accession);
if (sampleResource.isPresent()) {
Sample sample = sampleResource.get().getContent();
LOG.info("Original sample: {}", jsonMapper.writeValueAsString(sample));
if (sample.getAttributes().size() != 2) {
LOG.warn("Attributes size != 2, Attributes {}", sample.getAttributes());
}
//remove extra attributes from migration (deleted and other-migrated from....)
removeMigrationRelatedAttributes(sample);
Sample.Builder sampleBuilder = Sample.Builder.fromSample(sample)
.addAttribute(Attribute.build("ega dataset id", datasetId))
.addAttribute(Attribute.build("ega sample id", egaId))
.addAttribute(organism)
.addExternalReference(ExternalReference.build(EGA_DATASET_BASE_URL + datasetId, duoCodes))
.addExternalReference(ExternalReference.build(EGA_SAMPLE_BASE_URL + egaId))
.withRelease(Instant.now());
//ignore unknown, n/a terms
if (UNKNOWN_TERMS.contains(phenotype.toLowerCase())) {
LOG.info("Ignoring phenotype as it contains {}", phenotype);
} else {
Attribute attributePhenotype = populateAttribute(phenotype, phenotypeIris, ATTRIBUTE_PHENOTYPE);
sampleBuilder.addAttribute(attributePhenotype);
}
if (UNKNOWN_TERMS.contains(sex.toLowerCase())) {
LOG.info("Ignoring sex as it contains {}", sex);
} else {
Attribute attributeSex = populateAttribute(sex, getSexOntology(sex), ATTRIBUTE_SEX);
sampleBuilder.addAttribute(attributeSex);
}
Sample updatedSample = sampleBuilder.build();
LOG.info("Updated sample: {}", jsonMapper.writeValueAsString(updatedSample));
bioSamplesClient.persistSampleResource(updatedSample);
} else {
LOG.warn("Sample not found in biosamples: {}", accession);
}
}
private Map<String, SortedSet<String>> loadDuoCodeMap(String datasetDuoUrl) {
Map<String, SortedSet<String>> datasetToDuoCodesMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new FileReader(datasetDuoUrl))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", datasetDuoUrl, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] record = line.replaceAll("[\"\\[\\] ]", "").split(",");
String datasetId = record[0];
String[] duoCodes = Arrays.copyOfRange(record, 1, record.length);
datasetToDuoCodesMap.put(datasetId,
new TreeSet<>(Arrays.stream(duoCodes).map(s -> "DUO:" + s).collect(Collectors.toList())));
}
} catch (IOException e) {
LOG.error("couldn't read file: " + datasetDuoUrl, e);
}
return datasetToDuoCodesMap;
}
private Map<String, List<OlsResult>> loadPhenotypeIriMap(String phenotypeIriFile) {
Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new FileReader(phenotypeIriFile))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", phenotypeIriFile, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] record = line.split(",", -1);
String publicPhenotype = record[0];
String mappedPhenotype = record[1];
String hpoId = record[2];
String efoId = record[3];
List<OlsResult> iriSet = new ArrayList<>();
if (hpoId != null && !"".equals(hpoId)) {
Optional<OlsResult> olsResult = getOlsMappedTerm(hpoId);
olsResult.ifPresent(iriSet::add);
}
if (efoId != null && !"".equals(efoId)) {
Optional<OlsResult> olsResult = getOlsMappedTerm(efoId);
olsResult.ifPresent(iriSet::add);
}
phenotypeIriMap.put(publicPhenotype, iriSet);
}
} catch (IOException e) {
LOG.error("couldn't read file: " + phenotypeIriFile, e);
}
return phenotypeIriMap;
}
private void removeMigrationRelatedAttributes(Sample sample) {
List<Attribute> attributesToRemove = new ArrayList<>();
for (Attribute attribute : sample.getAttributes()) {
if (attribute.getType().equals("deleted") ||
(attribute.getType().equals("other") && attribute.getValue().startsWith("migrated from"))) {
attributesToRemove.add(attribute);
LOG.info("Removing attribute {}={} from original sample: {}", attribute.getType(), attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("phenotype")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute phenotype={} from original sample: {}", attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("organism")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute organism={} from original sample: {}", attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("sex")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute sex={} from original sample: {}", attribute.getValue(), sample.getAccession());
}
}
for (Attribute attribute : attributesToRemove) {
sample.getAttributes().remove(attribute);
}
}
private Attribute populateAttribute(String phenotype, List<OlsResult> attributeIris, String attributeType) {
Optional<OlsResult> olsMappedTerm = getOlsMappedTerm(phenotype);
Attribute attribute;
List<String> iris = new ArrayList<>();
if (attributeIris != null && !attributeIris.isEmpty()) {
for (OlsResult o : attributeIris) {
iris.add(o.getIri());
}
}
if (olsMappedTerm.isPresent()) {
iris.add(olsMappedTerm.get().getIri());
attribute = Attribute.build(attributeType, olsMappedTerm.get().getLabel(), iris, null);
} else {
attribute = Attribute.build(attributeType, phenotype, iris, null);
}
return attribute;
}
private Optional<OlsResult> getOlsMappedTerm(String termToMap) {
Optional<OlsResult> olsMappedTerm = Optional.empty();
if (termToMap.matches("^[A-Za-z]+[_:\\-][0-9]+$")) {
olsMappedTerm = olsProcessor.queryForOlsObject(termToMap);
if (olsMappedTerm.isPresent()) {
LOG.info("OLS mapped term {} into {}", termToMap, olsMappedTerm.get().getIri());
} else {
LOG.warn("Could not find term({}) in OLS", termToMap);
}
}
return olsMappedTerm;
}
private List<OlsResult> getSexOntology(String sex) {
List<OlsResult> olsResults;
switch(sex.toLowerCase()) {
case "male":
olsResults = Collections.singletonList(new OlsResult("male", "http://purl.obolibrary.org/obo/PATO_0000384"));
break;
case "female":
olsResults = Collections.singletonList(new OlsResult("female", "http://purl.obolibrary.org/obo/PATO_0000383"));
break;
default:
olsResults = null;
break;
}
return olsResults;
}
}
| scripts/ega-import/src/main/java/uk/ac/ebi/biosamples/EGAImportRunner.java | package uk.ac.ebi.biosamples;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.hateoas.Resource;
import org.springframework.stereotype.Component;
import uk.ac.ebi.biosamples.client.BioSamplesClient;
import uk.ac.ebi.biosamples.model.Attribute;
import uk.ac.ebi.biosamples.model.ExternalReference;
import uk.ac.ebi.biosamples.model.Sample;
import uk.ac.ebi.biosamples.ols.OlsProcessor;
import uk.ac.ebi.biosamples.ols.OlsResult;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;
@Component
public class EGAImportRunner implements ApplicationRunner {
private static final Logger LOG = LoggerFactory.getLogger(EGAImportRunner.class);
private static final String EGA_DATASET_BASE_URL = "https://ega-archive.org/datasets/";
private static final String EGA_SAMPLE_BASE_URL = "https://ega-archive.org/metadata/v2/samples/";
private static final Set<String> UNKNOWN_TERMS = new HashSet<>(Arrays.asList("n/a", "na", "n.a", "none",
"unknown", "--", ".", "null", "missing", "[not reported]", "[not requested]", "not applicable",
"not_applicable", "not collected", "not specified", "not known", "not reported", "missing: not provided"));
private static final String ATTRIBUTE_PHENOTYPE = "phenotype";
private static final String ATTRIBUTE_SEX = "sex";
private final Attribute organism;
private final BioSamplesClient bioSamplesClient;
private final OlsProcessor olsProcessor;
@Autowired
public EGAImportRunner(BioSamplesClient bioSamplesClient, OlsProcessor olsProcessor) {
this.bioSamplesClient = bioSamplesClient;
this.olsProcessor = olsProcessor;
organism = Attribute.build("organism", "Homo sapiens", "http://purl.obolibrary.org/obo/NCBITaxon_9606", null);
}
@Override
public void run(ApplicationArguments args) {
if (args.getSourceArgs().length < 1) {
LOG.error("Please specify a data folder as a program argument");
throw new IllegalArgumentException("Please specify a data folder as a program argument");
}
final String dataFolderUrl = args.getSourceArgs()[0];
final String datasetDuoUrl = dataFolderUrl + "datasets_duo.csv";
final String sampleDataUrl = dataFolderUrl + "sanger_released_samples.csv";
final String phenotypeIriFile = dataFolderUrl + "sanger_datasets_public_phenotype_hpo.csv";
Map<String, SortedSet<String>> datasetToDuoCodesMap = loadDuoCodeMap(datasetDuoUrl);
// Map<String, List<OlsResult>> phenotypeIriMap = loadPhenotypeIriMap(phenotypeIriFile);
Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new FileReader(sampleDataUrl))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", sampleDataUrl, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] sampleValues = line.split(",");
String accession = sampleValues[0];
String egaId = sampleValues[1];
String datasetId = sampleValues[2];
String phenotype = sampleValues[3];
String sex = sampleValues[4];
SortedSet<String> duoCodes = datasetToDuoCodesMap.get(datasetId);
List<OlsResult> phenotypeIris = phenotypeIriMap.get(phenotype);
processSampleRecord(accession, egaId, datasetId, phenotype, sex, duoCodes, phenotypeIris);
}
} catch (JsonProcessingException e) {
LOG.error("JSON conversion failure", e);
} catch (IOException e) {
LOG.error("Couldn't read file: " + datasetDuoUrl, e);
}
}
private void processSampleRecord(String accession, String egaId, String datasetId, String phenotype, String sex,
SortedSet<String> duoCodes, List<OlsResult> phenotypeIris)
throws JsonProcessingException {
final ObjectMapper jsonMapper = new ObjectMapper();
Optional<Resource<Sample>> sampleResource = bioSamplesClient.fetchSampleResource(accession);
if (sampleResource.isPresent()) {
Sample sample = sampleResource.get().getContent();
LOG.info("Original sample: {}", jsonMapper.writeValueAsString(sample));
if (sample.getAttributes().size() != 2) {
LOG.warn("Attributes size != 2, Attributes {}", sample.getAttributes());
}
//remove extra attributes from migration (deleted and other-migrated from....)
removeMigrationRelatedAttributes(sample);
Sample.Builder sampleBuilder = Sample.Builder.fromSample(sample)
.addAttribute(Attribute.build("ega dataset id", datasetId))
.addAttribute(Attribute.build("ega sample id", egaId))
.addAttribute(organism)
.addExternalReference(ExternalReference.build(EGA_DATASET_BASE_URL + datasetId, duoCodes))
.addExternalReference(ExternalReference.build(EGA_SAMPLE_BASE_URL + egaId))
.withRelease(Instant.now());
//ignore unknown, n/a terms
if (UNKNOWN_TERMS.contains(phenotype.toLowerCase())) {
LOG.info("Ignoring phenotype as it contains {}", phenotype);
} else {
Attribute attributePhenotype = populateAttribute(phenotype, phenotypeIris, ATTRIBUTE_PHENOTYPE);
sampleBuilder.addAttribute(attributePhenotype);
}
if (UNKNOWN_TERMS.contains(sex.toLowerCase())) {
LOG.info("Ignoring sex as it contains {}", sex);
} else {
Attribute attributeSex = populateAttribute(sex, getSexOntology(sex), ATTRIBUTE_SEX);
sampleBuilder.addAttribute(attributeSex);
}
Sample updatedSample = sampleBuilder.build();
LOG.info("Updated sample: {}", jsonMapper.writeValueAsString(updatedSample));
bioSamplesClient.persistSampleResource(updatedSample);
} else {
LOG.warn("Sample not found in biosamples: {}", accession);
}
}
private Map<String, SortedSet<String>> loadDuoCodeMap(String datasetDuoUrl) {
Map<String, SortedSet<String>> datasetToDuoCodesMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new FileReader(datasetDuoUrl))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", datasetDuoUrl, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] record = line.replaceAll("[\"\\[\\] ]", "").split(",");
String datasetId = record[0];
String[] duoCodes = Arrays.copyOfRange(record, 1, record.length);
datasetToDuoCodesMap.put(datasetId,
new TreeSet<>(Arrays.stream(duoCodes).map(s -> "DUO:" + s).collect(Collectors.toList())));
}
} catch (IOException e) {
LOG.error("couldn't read file: " + datasetDuoUrl, e);
}
return datasetToDuoCodesMap;
}
private Map<String, List<OlsResult>> loadPhenotypeIriMap(String phenotypeIriFile) {
Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new FileReader(phenotypeIriFile))) {
String line = br.readLine(); //ignore header
LOG.info("Reading file: {}, headers: {}", phenotypeIriFile, line);
while ((line = br.readLine()) != null && !line.isEmpty()) {
String[] record = line.split(",", -1);
String publicPhenotype = record[0];
String mappedPhenotype = record[1];
String hpoId = record[2];
String efoId = record[3];
List<OlsResult> iriSet = new ArrayList<>();
if (hpoId != null && !"".equals(hpoId)) {
Optional<OlsResult> olsResult = getOlsMappedTerm(hpoId);
olsResult.ifPresent(iriSet::add);
}
if (efoId != null && !"".equals(efoId)) {
Optional<OlsResult> olsResult = getOlsMappedTerm(efoId);
olsResult.ifPresent(iriSet::add);
}
phenotypeIriMap.put(publicPhenotype, iriSet);
}
} catch (IOException e) {
LOG.error("couldn't read file: " + phenotypeIriFile, e);
}
return phenotypeIriMap;
}
private void removeMigrationRelatedAttributes(Sample sample) {
List<Attribute> attributesToRemove = new ArrayList<>();
for (Attribute attribute : sample.getAttributes()) {
if (attribute.getType().equals("deleted") ||
(attribute.getType().equals("other") && attribute.getValue().startsWith("migrated from"))) {
attributesToRemove.add(attribute);
LOG.info("Removing attribute {}={} from original sample: {}", attribute.getType(), attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("phenotype")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute phenotype={} from original sample: {}", attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("organism")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute organism={} from original sample: {}", attribute.getValue(), sample.getAccession());
} else if (attribute.getType().equals("sex")) {
attributesToRemove.add(attribute);
LOG.warn("Removing attribute sex={} from original sample: {}", attribute.getValue(), sample.getAccession());
}
}
for (Attribute attribute : attributesToRemove) {
sample.getAttributes().remove(attribute);
}
}
private Attribute populateAttribute(String phenotype, List<OlsResult> attributeIris, String attributeType) {
Optional<OlsResult> olsMappedTerm = getOlsMappedTerm(phenotype);
Attribute attribute;
List<String> iris = new ArrayList<>();
if (attributeIris != null && !attributeIris.isEmpty()) {
for (OlsResult o : attributeIris) {
iris.add(o.getIri());
}
}
if (olsMappedTerm.isPresent()) {
iris.add(olsMappedTerm.get().getIri());
attribute = Attribute.build(attributeType, olsMappedTerm.get().getLabel(), iris, null);
} else {
attribute = Attribute.build(attributeType, phenotype, iris, null);
}
return attribute;
}
private Optional<OlsResult> getOlsMappedTerm(String termToMap) {
Optional<OlsResult> olsMappedTerm = Optional.empty();
if (termToMap.matches("^[A-Za-z]+[_:\\-][0-9]+$")) {
olsMappedTerm = olsProcessor.queryForOlsObject(termToMap);
if (olsMappedTerm.isPresent()) {
LOG.info("OLS mapped term {} into {}", termToMap, olsMappedTerm.get().getIri());
} else {
LOG.warn("Could not find term({}) in OLS", termToMap);
}
}
return olsMappedTerm;
}
private List<OlsResult> getSexOntology(String sex) {
List<OlsResult> olsResults;
switch(sex.toLowerCase()) {
case "male":
olsResults = Collections.singletonList(new OlsResult("male", "http://purl.obolibrary.org/obo/PATO_0000384"));
break;
case "female":
olsResults = Collections.singletonList(new OlsResult("female", "http://purl.obolibrary.org/obo/PATO_0000383"));
break;
default:
olsResults = null;
break;
}
return olsResults;
}
}
| commenting ols mapping of known attributes until we receive the correct file
| scripts/ega-import/src/main/java/uk/ac/ebi/biosamples/EGAImportRunner.java | commenting ols mapping of known attributes until we receive the correct file | <ide><path>cripts/ega-import/src/main/java/uk/ac/ebi/biosamples/EGAImportRunner.java
<ide>
<ide> Map<String, SortedSet<String>> datasetToDuoCodesMap = loadDuoCodeMap(datasetDuoUrl);
<ide> // Map<String, List<OlsResult>> phenotypeIriMap = loadPhenotypeIriMap(phenotypeIriFile);
<del> Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();
<add> Map<String, List<OlsResult>> phenotypeIriMap = new HashMap<>();//todo remove this and uncomment above
<ide>
<ide> try (BufferedReader br = new BufferedReader(new FileReader(sampleDataUrl))) {
<ide> String line = br.readLine(); //ignore header |
|
JavaScript | mit | b1f7e5df88e8fac5eee675b97bac75725f2232ae | 0 | jaubourg/dominoes,jaubourg/dominoes | var rules = {},
rulesInternals = {};
// Declare or get a rule
dominoes.rule = function( id ) {
var length = arguments.length;
if ( length > 1 ) {
var list = parseList( slice.call( arguments , 1 ) ),
ruleInternal = rulesInternals[ id ];
// Create entry no matter what
if ( ! ruleInternal ) {
var go = function() {
execute( ruleInternal , function() {
if ( running = list.length ) {
running--;
list.shift()();
if ( running ) {
go();
}
}
} );
},
running;
ruleInternal = rulesInternals[ id ] = [];
rules[ id ] = function ( callback ) {
if ( isFunction(callback) ) {
list.push( callback );
}
if ( ! running ) {
running = TRUE;
go();
}
return true;
};
}
// Filter out empty lists
if ( list.length ) {
// Note as non optional
list.push( FALSE );
// Add in
ruleInternal.push( list );
}
// Free list for re-use
list = [];
} else if ( length ) {
return rules[ id ];
} else {
rules = {};
rulesInternals = {};
}
return this;
};
| src/rule.js | var rules = {},
rulesInternals = {};
// Declare or get a rule
dominoes.rule = function( id ) {
var length = arguments.length;
if ( length > 1 ) {
var list = parseList( slice.call( arguments , 1 ) ),
ruleInternal = rulesInternals[ id ];
// Create entry no matter what
if ( ! ruleInternal ) {
ruleInternal = rulesInternals[ id ] = [];
rules[ id ] = function ( callback ) {
return execute( ruleInternal , callback );
};
}
// Filter out empty lists
if ( list.length ) {
// Note as non optional
list.push( FALSE );
// Add in
ruleInternal.push( list );
}
} else if ( length ) {
return rules[ id ];
} else {
rules = {};
rulesInternals = {};
}
return this;
};
| Fixed a nasty bug that made rules consumed ahead of time.
Signed-off-by: jaubourg <[email protected]>
| src/rule.js | Fixed a nasty bug that made rules consumed ahead of time. | <ide><path>rc/rule.js
<ide>
<ide> // Create entry no matter what
<ide> if ( ! ruleInternal ) {
<add>
<add> var go = function() {
<add> execute( ruleInternal , function() {
<add> if ( running = list.length ) {
<add> running--;
<add> list.shift()();
<add> if ( running ) {
<add> go();
<add> }
<add> }
<add> } );
<add> },
<add> running;
<add>
<ide> ruleInternal = rulesInternals[ id ] = [];
<ide> rules[ id ] = function ( callback ) {
<del> return execute( ruleInternal , callback );
<add> if ( isFunction(callback) ) {
<add> list.push( callback );
<add> }
<add> if ( ! running ) {
<add> running = TRUE;
<add> go();
<add> }
<add> return true;
<ide> };
<ide> }
<ide>
<ide> // Add in
<ide> ruleInternal.push( list );
<ide> }
<add>
<add> // Free list for re-use
<add> list = [];
<ide>
<ide> } else if ( length ) {
<ide> |
|
Java | apache-2.0 | d3c4a2830c3bd6d2c24748561f4580a4ef532c49 | 0 | tremes/testng,rschmitt/testng,meeroslaph/testng,akozlova/testng,jerome-jacob/testng,smaudet/testng,VikingDen/testng,6ft-invsbl-rbbt/testng,tobecrazy/testng,aledsage/testng,missedone/testng,jerome-jacob/testng,raindev/testng,krmahadevan/testng,tobecrazy/testng,jaypal/testng,tremes/testng,s2oBCN/testng,VikingDen/testng,scr/testng,jaypal/testng,missedone/testng,emopers/testng,missedone/testng,JeshRJ/myRepRJ,smaudet/testng,juherr/testng,bmlct/testng,juherr/testng,AJ-72/testng,krmahadevan/testng,tobecrazy/testng,missedone/testng,aledsage/testng,jaypal/testng,msebire/testng,meeroslaph/testng,AJ-72/testng,cbeust/testng,aledsage/testng,gjuillot/testng,gjuillot/testng,rschmitt/testng,bmlct/testng,akozlova/testng,6ft-invsbl-rbbt/testng,gjuillot/testng,JeshRJ/myRepRJ,s2oBCN/testng,scr/testng,rschmitt/testng,meeroslaph/testng,raindev/testng,tobecrazy/testng,AJ-72/testng,jerome-jacob/testng,emopers/testng,emopers/testng,raindev/testng,6ft-invsbl-rbbt/testng,bmlct/testng,6ft-invsbl-rbbt/testng,bmlct/testng,juherr/testng,akozlova/testng,JeshRJ/myRepRJ,jerome-jacob/testng,smaudet/testng,raindev/testng,AJ-72/testng,juherr/testng,akozlova/testng,rschmitt/testng,VladRassokhin/testng,s2oBCN/testng,AJ-72/testng,msebire/testng,msebire/testng,aledsage/testng,cbeust/testng,tobecrazy/testng,smaudet/testng,VladRassokhin/testng,aledsage/testng,cbeust/testng,emopers/testng,VladRassokhin/testng,akozlova/testng,gjuillot/testng,VladRassokhin/testng,s2oBCN/testng,JeshRJ/myRepRJ,tremes/testng,VladRassokhin/testng,krmahadevan/testng,scr/testng,tremes/testng,cbeust/testng,scr/testng,emopers/testng,VikingDen/testng,bmlct/testng,jerome-jacob/testng,6ft-invsbl-rbbt/testng,tremes/testng,msebire/testng,cbeust/testng,meeroslaph/testng,VladRassokhin/testng,juherr/testng,s2oBCN/testng,krmahadevan/testng,jaypal/testng,scr/testng,raindev/testng,aledsage/testng,JeshRJ/myRepRJ,smaudet/testng,missedone/testng,AJ-72/testng,krmahadevan/testng,VikingDen/testng,msebire/testng,VikingDen/testng,raindev/testng,gjuillot/testng,meeroslaph/testng,jaypal/testng,rschmitt/testng | package test.tmp;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import test.SimpleBaseTest;
//@Test(sequential = true)
//@Listeners(AListener.class)
public class A extends SimpleBaseTest {
// @Factory
public Object[] f() {
return new Object[] {
new A(),
new A()
};
}
@BeforeMethod
public void bm() {
System.out.println("Before method");
}
@AfterMethod
public void am() {
System.out.println("After method");
}
@Test
public void a1() {
System.out.println("a1 throwing");
throw new RuntimeException();
// System.out.println("a1 " + Thread.currentThread().getId());
}
// @Test
public void a2() {
System.out.println("a2 " + Thread.currentThread().getId());
}
}
| src/test/java/test/tmp/A.java | package test.tmp;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Listeners;
import org.testng.annotations.Test;
import test.SimpleBaseTest;
//@Test(sequential = true)
@Listeners(AListener.class)
public class A extends SimpleBaseTest {
// @Factory
public Object[] f() {
return new Object[] {
new A(),
new A()
};
}
@BeforeMethod
public void bm() {
System.out.println("Before method");
}
@AfterMethod
public void am() {
System.out.println("After method");
}
@Test
public void a1() {
System.out.println("a1 throwing");
throw new RuntimeException();
// System.out.println("a1 " + Thread.currentThread().getId());
}
// @Test
public void a2() {
System.out.println("a2 " + Thread.currentThread().getId());
}
}
| Build fix
| src/test/java/test/tmp/A.java | Build fix | <ide><path>rc/test/java/test/tmp/A.java
<ide>
<ide> import org.testng.annotations.AfterMethod;
<ide> import org.testng.annotations.BeforeMethod;
<del>import org.testng.annotations.Listeners;
<ide> import org.testng.annotations.Test;
<ide>
<ide> import test.SimpleBaseTest;
<ide>
<ide> //@Test(sequential = true)
<del>@Listeners(AListener.class)
<add>//@Listeners(AListener.class)
<ide> public class A extends SimpleBaseTest {
<ide>
<ide> // @Factory |
|
Java | agpl-3.0 | 7f6062c462649449c57feca2e83fddbaa2e7280f | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 5c368cbe-2e62-11e5-9284-b827eb9e62be | hello.java | 5c3120da-2e62-11e5-9284-b827eb9e62be | 5c368cbe-2e62-11e5-9284-b827eb9e62be | hello.java | 5c368cbe-2e62-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>5c3120da-2e62-11e5-9284-b827eb9e62be
<add>5c368cbe-2e62-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 71c465bf08c850dd22e434854828b6d681c2a708 | 0 | benvanwerkhoven/Xenon,benvanwerkhoven/Xenon,NLeSC/Xenon,NLeSC/Xenon | /*
* Copyright 2013 Netherlands eScience Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.esciencecenter.xenon.adaptors;
import java.io.Closeable;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import nl.esciencecenter.xenon.Xenon;
import nl.esciencecenter.xenon.XenonException;
import nl.esciencecenter.xenon.XenonFactory;
import nl.esciencecenter.xenon.credentials.Credential;
import nl.esciencecenter.xenon.credentials.Credentials;
import nl.esciencecenter.xenon.engine.files.PathAttributesPairImplementation;
import nl.esciencecenter.xenon.files.Copy;
import nl.esciencecenter.xenon.files.CopyOption;
import nl.esciencecenter.xenon.files.CopyStatus;
import nl.esciencecenter.xenon.files.DirectoryStream;
import nl.esciencecenter.xenon.files.FileAttributes;
import nl.esciencecenter.xenon.files.FileSystem;
import nl.esciencecenter.xenon.files.Files;
import nl.esciencecenter.xenon.files.OpenOption;
import nl.esciencecenter.xenon.files.Path;
import nl.esciencecenter.xenon.files.PathAttributesPair;
import nl.esciencecenter.xenon.files.PosixFilePermission;
import nl.esciencecenter.xenon.files.RelativePath;
import nl.esciencecenter.xenon.util.Utils;
import org.junit.After;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runners.MethodSorters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Jason Maassen <[email protected]>
*
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public abstract class GenericFileAdaptorTestParent {
private static final Logger logger = LoggerFactory.getLogger(GenericFileAdaptorTestParent.class);
protected static String TEST_ROOT;
public static FileTestConfig config;
protected Xenon xenon;
protected Files files;
protected Credentials credentials;
protected Path testDir;
private long counter = 0;
@Rule
public TestWatcher watcher = new TestWatcher() {
@Override
public void starting(Description description) {
logger.info("Running test {}", description.getMethodName());
}
@Override
public void failed(Throwable reason, Description description) {
logger.info("Test {} failed due to exception", description.getMethodName(), reason);
}
@Override
public void succeeded(Description description) {
logger.info("Test {} succeeded", description.getMethodName());
}
@Override
public void skipped(AssumptionViolatedException reason, Description description) {
logger.info("Test {} skipped due to failed assumption", description.getMethodName(), reason);
}
};
// MUST be invoked by a @BeforeClass method of the subclass!
public static void prepareClass(FileTestConfig testConfig) throws Exception {
config = testConfig;
TEST_ROOT = "xenon_test_" + config.getAdaptorName() + "_" + System.currentTimeMillis();
}
// MUST be invoked by a @AfterClass method of the subclass!
public static void cleanupClass() throws Exception {
System.err.println("GenericFileAdaptorTest.cleanupClass() attempting to remove: " + TEST_ROOT);
Xenon xenon = XenonFactory.newXenon(null);
Files files = xenon.files();
Credentials credentials = xenon.credentials();
Path p = config.getWorkingDir(files, credentials);
Path root = files.newPath(p.getFileSystem(), p.getRelativePath().resolve(TEST_ROOT));
if (files.exists(root)) {
files.delete(root);
}
XenonFactory.endXenon(xenon);
}
public Path resolve(Path root, String... path) throws XenonException {
return files.newPath(root.getFileSystem(), root.getRelativePath().resolve(new RelativePath(path)));
}
@Before
public void prepare() throws Exception {
xenon = XenonFactory.newXenon(null);
files = xenon.files();
credentials = xenon.credentials();
}
@After
public void cleanup() throws Exception {
XenonFactory.endXenon(xenon);
files = null;
xenon = null;
}
// Various util functions ------------------------------------------------------------
class AllTrue implements DirectoryStream.Filter {
@Override
public boolean accept(Path entry) {
return true;
}
}
class AllFalse implements DirectoryStream.Filter {
@Override
public boolean accept(Path entry) {
return false;
}
}
class Select implements DirectoryStream.Filter {
private Set<Path> set;
public Select(Set<Path> set) {
this.set = set;
}
@Override
public boolean accept(Path entry) {
return set.contains(entry);
}
}
private void throwUnexpected(String name, Exception e) throws Exception {
throw new Exception(name + " throws unexpected Exception!", e);
}
private void throwExpected(String name) throws Exception {
throw new Exception(name + " did NOT throw Exception which was expected!");
}
private void throwWrong(String name, Object expected, Object result) throws Exception {
throw new Exception(name + " produced wrong result! Expected: " + expected + " but got: " + result);
}
private void throwUnexpectedElement(String name, Object element) throws Exception {
throw new Exception(name + " produced unexpected element: " + element);
}
// private void throwMissingElement(String name, String element) throws Exception {
//
// throw new Exception(name + " did NOT produce element: " + element);
// }
private void throwMissingElements(String name, Collection elements) throws Exception {
throw new Exception(name + " did NOT produce elements: " + elements);
}
private void close(Closeable c) {
if (c == null) {
return;
}
try {
c.close();
} catch (Exception e) {
// ignore
}
}
// Depends on: Path.resolve, RelativePath, exists
private Path createNewTestDirName(Path root) throws Exception {
Path dir = resolve(root, "dir" + counter);
counter++;
if (files.exists(dir)) {
throw new Exception("Generated test dir already exists! " + dir);
}
return dir;
}
// Depends on: [createNewTestDirName], createDirectory, exists
private Path createTestDir(Path root) throws Exception {
Path dir = createNewTestDirName(root);
files.createDirectory(dir);
if (!files.exists(dir)) {
throw new Exception("Failed to generate test dir! " + dir);
}
return dir;
}
// Depends on: [createTestDir]
protected void prepareTestDir(String testName) throws Exception {
Path p = config.getWorkingDir(files, credentials);
if (testDir != null) {
return;
}
testDir = resolve(p, TEST_ROOT, testName);
if (!files.exists(testDir)) {
files.createDirectories(testDir);
}
}
// Depends on: [createTestDir]
private void closeTestFS() throws Exception {
if (testDir == null) {
return;
}
files.close(testDir.getFileSystem());
testDir = null;
}
// Depends on: Path.resolve, RelativePath, exists
private Path createNewTestFileName(Path root) throws Exception {
Path file = resolve(root, "file" + counter);
counter++;
if (files.exists(file)) {
throw new Exception("Generated NEW test file already exists! " + file);
}
return file;
}
// Depends on: newOutputStream
private void writeData(Path testFile, byte[] data) throws Exception {
OutputStream out = files.newOutputStream(testFile, OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.WRITE);
if (data != null) {
out.write(data);
}
out.close();
}
// Depends on: [createNewTestFileName], createFile, [writeData]
protected Path createTestFile(Path root, byte[] data) throws Exception {
Path file = createNewTestFileName(root);
files.createFile(file);
if (data != null && data.length > 0) {
writeData(file, data);
}
return file;
}
// Depends on: exists, isDirectory, delete
private void deleteTestFile(Path file) throws Exception {
if (!files.exists(file)) {
throw new Exception("Cannot delete non-existing file: " + file);
}
FileAttributes att = files.getAttributes(file);
if (att.isDirectory()) {
throw new Exception("Cannot delete directory: " + file);
}
files.delete(file);
}
// Depends on: exists, isDirectory, delete
protected void deleteTestDir(Path dir) throws Exception {
if (!files.exists(dir)) {
throw new Exception("Cannot delete non-existing dir: " + dir);
}
FileAttributes att = files.getAttributes(dir);
if (!att.isDirectory()) {
throw new Exception("Cannot delete file: " + dir);
}
files.delete(dir);
}
private byte[] readFully(InputStream in) throws Exception {
byte[] buffer = new byte[1024];
int offset = 0;
int read = in.read(buffer, offset, buffer.length - offset);
while (read != -1) {
offset += read;
if (offset == buffer.length) {
buffer = Arrays.copyOf(buffer, buffer.length * 2);
}
read = in.read(buffer, offset, buffer.length - offset);
}
close(in);
return Arrays.copyOf(buffer, offset);
}
// private byte [] readFully(SeekableByteChannel channel) throws Exception {
//
// ByteBuffer buffer = ByteBuffer.allocate(1024);
//
// int read = channel.read(buffer);
//
// while (read != -1) {
//
// System.err.println("READ from channel " + read);
//
// if (buffer.position() == buffer.limit()) {
// ByteBuffer tmp = ByteBuffer.allocate(buffer.limit()*2);
// buffer.flip();
// tmp.put(buffer);
// buffer = tmp;
// }
//
// read = channel.read(buffer);
// }
//
// close(channel);
//
// buffer.flip();
// byte [] tmp = new byte[buffer.remaining()];
// buffer.get(tmp);
//
// System.err.println("Returning byte[" + tmp.length + "]");
//
// return tmp;
// }
// The test start here.
// ---------------------------------------------------------------------------------------------------------------------------
// TEST newFileSystem
//
// Possible parameters:
// URI - correct URI / wrong user / wrong location / wrong path
// Credentials - default / null / value
// Properties - null / empty / set right / set wrong
//
// Total combinations: 4 + 2 + 3 = 9
//
// Depends on: newFileSystem, close
private void test00_newFileSystem(String scheme, String location, Credential c, Map<String, String> p, boolean mustFail)
throws Exception {
try {
FileSystem fs = files.newFileSystem(scheme, location, c, p);
files.close(fs);
} catch (Exception e) {
if (mustFail) {
// exception was expected.
return;
}
// exception was not expected
throwUnexpected("test00_newFileSystem", e);
}
if (mustFail) {
// expected an exception!
throwExpected("test00_newFileSystem");
}
}
@org.junit.Test
public void test00_newFileSystem_nullUriAndCredentials_shouldThrow() throws Exception {
test00_newFileSystem(null, null, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nullCredentials_shouldThrow() throws Exception {
test00_newFileSystem(config.getScheme(), null, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nullProperties_throwIfApplicable() throws Exception {
// test with correct URI without credential and without properties
boolean allowNull = config.supportNullCredential();
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), null, null, !allowNull);
}
@org.junit.Test
public void test00_newFileSystem_correctArguments_noThrow() throws Exception {
// test with correct scheme with, correct location, location
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials), null,
false);
}
@org.junit.Test
public void test00_newFileSystem_wrongLocation_throw() throws Exception {
// test with correct scheme with, wrong location
test00_newFileSystem(config.getScheme(), config.getWrongLocation(), config.getDefaultCredential(credentials), null, true);
}
@org.junit.Test
public void test00_newFileSystem_userInUriIfSupported_noThrow() throws Exception {
if (!config.supportUserInUri()) {
return;
}
String uriWithUsername = config.getCorrectLocationWithUser();
test00_newFileSystem(config.getScheme(), uriWithUsername, null, null, false);
}
@org.junit.Test
public void test00_newFileSystem_wrongUserInUriIfSupported_noThrow() throws Exception {
if (!config.supportUserInUri()) {
return;
}
String uriWithWrongUser = config.getCorrectLocationWithWrongUser();
test00_newFileSystem(config.getScheme(), uriWithWrongUser, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nonDefaultCredentialIfSupported_noThrow() throws Exception {
if (!config.supportNonDefaultCredential()) {
return;
}
Credential nonDefaultCredential = config.getNonDefaultCredential(credentials);
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), nonDefaultCredential, null, false);
}
@org.junit.Test
public void test00_newFileSystem_emptyProperties_noThrow() throws Exception {
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
new HashMap<String, String>(), false);
}
@org.junit.Test
public void test00_newFileSystem_correctProperties_noThrow() throws Exception {
if (!config.supportsProperties()) {
return;
}
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
config.getCorrectProperties(), false);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST isOpen
//
// Possible parameters:
//
// FileSystem - null / open FS / closed FS
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], close, isOpen
private void test01_isOpen(FileSystem fs, boolean expected, boolean mustFail) throws Exception {
boolean result = false;
try {
result = files.isOpen(fs);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test01_isOpen", e);
}
if (mustFail) {
throwExpected("test01_isOpen");
}
if (result != expected) {
throwWrong("test01_isOpen", expected, result);
}
}
@org.junit.Test
public void test01_isOpen_fsIsNull_throw() throws Exception {
test01_isOpen(null, false, true);
}
@org.junit.Test
public void test01_isOpen_openFs_true() throws Exception {
FileSystem fs = config.getTestFileSystem(files, credentials);
test01_isOpen(fs, true, false);
}
@org.junit.Test
public void test01_isOpen_closedFsIfSupported_false() throws Exception {
if (!config.supportsClose()) {
return;
}
FileSystem fs = config.getTestFileSystem(files, credentials);
files.close(fs);
test01_isOpen(fs, false, false);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST close
//
// Possible parameters:
//
// FileSystem - null / open FS / closed FS
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], close
private void test02_close(FileSystem fs, boolean mustFail) throws Exception {
try {
files.close(fs);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test02_close", e);
}
if (mustFail) {
throwExpected("test02_close");
}
}
@org.junit.Test
public void test02_close() throws Exception {
// test with null filesystem
test02_close(null, true);
if (config.supportsClose()) {
FileSystem fs = config.getTestFileSystem(files, credentials);
// test with correct open filesystem
test02_close(fs, false);
// test with correct closed filesystem
test02_close(fs, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST newPath
//
// Possible parameters:
//
// FileSystem - null / correct
// RelativePath - null / empty / value
//
// Total combinations : 2
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), Path.getPath(), RelativePath, close
private void test03_newPath(FileSystem fs, RelativePath path, String expected, boolean mustFail) throws Exception {
String result = null;
try {
result = files.newPath(fs, path).getRelativePath().getAbsolutePath();
} catch (Exception e) {
if (mustFail) {
// expected exception
return;
}
throwUnexpected("test03_newPath", e);
}
if (mustFail) {
throwExpected("test03_newPath");
}
if (!result.equals(expected)) {
throwWrong("test03_newPath", expected, result);
}
}
@org.junit.Test
public void test03_newPath() throws Exception {
FileSystem fs = config.getTestFileSystem(files, credentials);
String root = "/";
// test with null filesystem and null relative path
test03_newPath(null, null, null, true);
// test with correct filesystem and null relative path
test03_newPath(fs, null, null, true);
// test with correct filesystem and empty relative path
test03_newPath(fs, new RelativePath(), root, false);
// test with correct filesystem and relativepath with value
test03_newPath(fs, new RelativePath("test"), root + "test", false);
files.close(fs);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createDirectory
//
// Possible parameters:
//
// Path null / non-existing dir / existing dir / existing file / non-exising parent / closed filesystem
//
// Total combinations : 5
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], [createTestFile],
// createDirectory, [deleteTestDir], [deleteTestFile], [closeTestFileSystem]
private void test04_createDirectory(Path path, boolean mustFail) throws Exception {
try {
files.createDirectory(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test04_createDirectory", e);
}
if (mustFail) {
throwExpected("test04_createDirectory");
}
}
@org.junit.Test
public void test04_createDirectory() throws Exception {
// test with null
test04_createDirectory(null, true);
Path cwd = config.getWorkingDir(files, credentials);
Path root = resolve(cwd, TEST_ROOT);
// test with non-existing dir
test04_createDirectory(root, false);
// test with existing dir
test04_createDirectory(root, true);
// test with existing file
Path file0 = createTestFile(root, null);
test04_createDirectory(file0, true);
deleteTestFile(file0);
// test with non-existent parent dir
Path parent = createNewTestDirName(root);
Path dir0 = createNewTestDirName(parent);
test04_createDirectory(dir0, true);
// cleanup
deleteTestDir(root);
// close test FS
files.close(cwd.getFileSystem());
if (config.supportsClose()) {
// test with closed fs
test04_createDirectory(root, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createDirectories
//
// Possible parameters:
//
// Path null / non-existing dir / existing dir / dir with existing parents / dir with non existing parents /
// dir where last parent is file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test05_createDirectories(Path path, boolean mustFail) throws Exception {
try {
files.createDirectories(path);
assert (files.exists(path));
FileAttributes att = files.getAttributes(path);
assert (att.isDirectory());
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test05_createDirectories", e);
}
if (mustFail) {
throwExpected("createDirectory");
}
}
@org.junit.Test
public void test05_createDirectories() throws Exception {
// test with null
test05_createDirectories(null, true);
Path cwd = config.getWorkingDir(files, credentials);
Path root = resolve(cwd, TEST_ROOT, "test05_createDirectories");
// test with non-existing dir
test05_createDirectories(root, false);
// test with existing dir
test05_createDirectories(root, true);
// dir with existing parents
Path dir0 = createNewTestDirName(root);
test05_createDirectories(dir0, false);
deleteTestDir(dir0);
// dir with non-existing parents
Path dir1 = createNewTestDirName(dir0);
test05_createDirectories(dir1, false);
// dir where last parent is file
Path file0 = createTestFile(dir0, null);
Path dir2 = createNewTestDirName(file0);
test05_createDirectories(dir2, true);
// cleanup
deleteTestDir(dir1);
deleteTestFile(file0);
deleteTestDir(dir0);
deleteTestDir(root);
// close test FS
files.close(cwd.getFileSystem());
if (config.supportsClose()) {
// test with closed fs
test05_createDirectories(root, true);
}
}
// From this point on we can use prepareTestDir
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: isDirectory
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / closed filesystem
//
// Total combinations : 4
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile]
// [closeTestFileSystem]
//
// private void test06_isDirectory(Path path, boolean expected, boolean mustFail) throws Exception {
//
// boolean result = false;
//
// try {
// result = files.isDirectory(path);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test06_isDirectory", e);
// }
//
// if (mustFail) {
// throwExpected("test06_isDirectory");
// }
//
// if (result != expected) {
// throwWrong("test06_isDirectory", "" + expected, "" + result);
// }
// }
//
// @org.junit.Test
// public void test06_isDirectory() throws Exception {
//
//
//
// // prepare
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test06_isDirectory");
//
// // test with null
// test06_isDirectory(null, false, true);
//
// // test with non-existing file
// Path file0 = createNewTestFileName(testDir);
// test06_isDirectory(file0, false, false);
//
// // test with existing file
// Path file1 = createTestFile(testDir, null);
// test06_isDirectory(file1, false, false);
// deleteTestFile(file1);
//
// // test with existing dir
// test06_isDirectory(testDir, true, false);
//
// // cleanup
// deleteTestDir(testDir);
// config.closeTestFileSystem(files, fs);
//
// if (config.supportsClose()) {
// // test with closed filesystem
// test06_isDirectory(testDir, true, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createFile
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / non-existing parent / closed filesystem
//
// Total combinations : 6
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], createFile, delete, [deleteTestDir]
// [closeTestFileSystem]
private void test07_createFile(Path path, boolean mustFail) throws Exception {
try {
files.createFile(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test07_createFile", e);
}
if (mustFail) {
throwExpected("test07_createFile");
}
}
@org.junit.Test
public void test07_createFile() throws Exception {
// prepare
prepareTestDir("test07_createFile");
// test with null
test07_createFile(null, true);
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test07_createFile(file0, false);
// test with existing file
test07_createFile(file0, true);
// test with existing dir
test07_createFile(testDir, true);
Path tmp = createNewTestDirName(testDir);
Path file1 = createNewTestFileName(tmp);
// test with non-existing parent
test07_createFile(file1, true);
// cleanup
files.delete(file0);
deleteTestDir(testDir);
// close test FS
closeTestFS();
if (config.supportsClose()) {
// test with closed filesystem
test07_createFile(file0, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: exists
//
// Possible parameters:
//
// Path null / non-existing file / existing file
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile],
// [closeTestFileSystem], exists
private void test08_exists(Path path, boolean expected, boolean mustFail) throws Exception {
boolean result = false;
try {
result = files.exists(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test08_exists", e);
}
if (mustFail) {
throwExpected("test08_exists");
}
if (result != expected) {
throwWrong("test08_exists", expected, result);
}
}
@org.junit.Test
public void test08_exists() throws Exception {
// prepare
prepareTestDir("test08_exists");
// test with null
test08_exists(null, false, true);
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test08_exists(file0, false, false);
// test with existing file
Path file1 = createTestFile(testDir, null);
test08_exists(file1, true, false);
deleteTestFile(file1);
// cleanup
deleteTestDir(testDir);
// close test FS
closeTestFS();
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: delete
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing empty dir / existing non-empty dir /
// existing non-writable file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], delete, [deleteTestFile], [deleteTestDir]
// [closeTestFileSystem]
private void test09_delete(Path path, boolean mustFail) throws Exception {
try {
files.delete(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test09_delete", e);
}
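// After a successful delete the path must no longer exist.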
if (files.exists(path)) {
throwWrong("test09_delete", "no file", "a file");
}
if (mustFail) {
throwExpected("test09_delete");
}
}
@org.junit.Test
public void test09_delete() throws Exception {
// test with null
test09_delete(null, true);
prepareTestDir("test09_delete");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test09_delete(file0, true);
// test with existing file
Path file1 = createTestFile(testDir, null);
test09_delete(file1, false);
// test with existing empty dir
Path dir0 = createTestDir(testDir);
test09_delete(dir0, false);
// test with existing non-empty dir
Path dir1 = createTestDir(testDir);
Path file2 = createTestFile(dir1, null);
test09_delete(dir1, true);
// test with non-writable file
// Path file3 = createTestFile(testDir, null);
// files.setPosixFilePermissions(file3, new HashSet<PosixFilePermission>());
// System.err.println("Attempting to delete: " + file3.getPath() + " " + files.getAttributes(file3));
// test09_delete(file3, true);
// cleanup
deleteTestFile(file2);
deleteTestDir(dir1);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test09_delete(testDir, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: size
//
// Possible parameters:
//
// Path null / non-existing file / existing file size 0 / existing file size N / file from closed FS
//
// Total combinations : 5
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile],
// [deleteTestDir], [closeTestFileSystem], size, close
// private void test10_size(Path path, long expected, boolean mustFail) throws Exception {
//
// long result = -1;
//
// try {
// result = files.size(path);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test10_size", e);
// }
//
// if (mustFail) {
// throwExpected("test10_size");
// }
//
// if (result != expected) {
// throwWrong("test10_size", "" + expected, "" + result);
// }
// }
//
// @org.junit.Test
// public void test10_size() throws Exception {
//
//
//
// // test with null parameter
// test10_size(null, -1, true);
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test10_size");
//
// // test with non existing file
// Path file1 = createNewTestFileName(testDir);
// test10_size(file1, -1, true);
//
// // test with existing empty file
// Path file2 = createTestFile(testDir, new byte[0]);
// test10_size(file2, 0, false);
// deleteTestFile(file2);
//
// // test with existing filled file
// Path file3 = createTestFile(testDir, new byte[13]);
// test10_size(file3, 13, false);
// deleteTestFile(file3);
//
// // test with dir
// Path dir0 = createTestDir(testDir);
// test10_size(dir0, 0, false);
// deleteTestDir(dir0);
// deleteTestDir(testDir);
//
// // test with closed filesystem
// if (config.supportsClose()) {
// config.closeTestFileSystem(files, fs);
// test10_size(file1, 0, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newDirectoryStream
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestDirName], [createTestFile], newDirectoryStream,
// [deleteTestDir], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test11_newDirectoryStream(Path root, Set<Path> expected, boolean mustFail) throws Exception {
Set<Path> tmp = new HashSet<Path>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<Path> in = null;
try {
in = files.newDirectoryStream(root);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test11_newDirectoryStream", e);
}
if (mustFail) {
close(in);
throwExpected("test11_newDirectoryStream");
}
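// Every path returned by the stream must be present in the expected set; any extra entry or leftover expected entry is an error.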
for (Path p : in) {
if (tmp.contains(p)) {
tmp.remove(p);
} else {
close(in);
throwUnexpectedElement("test11_newDirectoryStream", p);
}
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test11_newDirectoryStream", tmp);
}
}
@org.junit.Test
public void test11_newDirectoryStream() throws Exception {
// test with null
test11_newDirectoryStream(null, null, true);
prepareTestDir("test11_newDirectoryStream");
// test with empty dir
test11_newDirectoryStream(testDir, null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test11_newDirectoryStream(dir0, null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test11_newDirectoryStream(file0, null, true);
// test with non-empty dir
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
tmp.add(file3);
test11_newDirectoryStream(testDir, tmp, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
tmp.add(dir1);
test11_newDirectoryStream(testDir, tmp, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test11_newDirectoryStream(testDir, null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newDirectoryStream with filter
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// DirectoryStream.Filter null / filter returns all / filter returns none / filter selects one.
// Total combinations : 7 + 8
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
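//
// Note: AllTrue, AllFalse and Select are assumed to be small DirectoryStream.Filter implementations defined elsewhere in
// this test class: AllTrue accepts every entry, AllFalse rejects every entry, and Select(set) accepts only the paths in
// the given set.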
public void test12_newDirectoryStream(Path root, DirectoryStream.Filter filter, Set<Path> expected, boolean mustFail)
throws Exception {
Set<Path> tmp = new HashSet<Path>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<Path> in = null;
try {
in = files.newDirectoryStream(root, filter);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test12_newDirectoryStream_with_filter", e);
}
if (mustFail) {
close(in);
throwExpected("test12_newDirectoryStream_with_filter");
}
Iterator<Path> itt = in.iterator();
while (itt.hasNext()) {
Path p = itt.next();
if (p == null) {
throwUnexpectedElement("test12_newDirectoryStream_with_filter", null);
}
if (tmp.contains(p)) {
tmp.remove(p);
} else {
close(in);
throwUnexpectedElement("test12_newDirectoryStream_with_filter", p.toString());
}
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test12_newDirectoryStream_with_filter", tmp);
}
// close(in); // double close should result in exception
}
@org.junit.Test
public void test12_newDirectoryStream_with_filter() throws Exception {
// test with null
test12_newDirectoryStream(null, null, null, true);
prepareTestDir("test12_newDirectoryStream_with_filter");
// test with empty dir + null filter
test12_newDirectoryStream(testDir, null, null, true);
// test with empty dir + true filter
test12_newDirectoryStream(testDir, new AllTrue(), null, false);
// test with empty dir + false filter
test12_newDirectoryStream(testDir, new AllFalse(), null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test12_newDirectoryStream(dir0, new AllTrue(), null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test12_newDirectoryStream(file0, new AllTrue(), null, true);
// test with non-empty dir and allTrue
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
tmp.add(file3);
test12_newDirectoryStream(testDir, new AllTrue(), tmp, false);
// test with non-empty dir and allFalse
test12_newDirectoryStream(testDir, new AllFalse(), null, false);
tmp.remove(file3);
// test with non-empty dir and select
test12_newDirectoryStream(testDir, new Select(tmp), tmp, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
test12_newDirectoryStream(testDir, new Select(tmp), tmp, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test12_newDirectoryStream(testDir, new AllTrue(), null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: getAttributes
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / existing link (!)
// closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test13_getAttributes(Path path, boolean isDirectory, long size, long currentTime, boolean mustFail)
throws Exception {
FileAttributes result = null;
try {
result = files.getAttributes(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test13_getFileAttributes", e);
}
if (mustFail) {
throwExpected("test13_getFileAttributes");
}
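// Verify the directory flag, the size (when specified), and that all three time stamps are close to the current time.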
if (result.isDirectory() && !isDirectory) {
throwWrong("test13_getFileAttributes", "<not directory>", "<directory>");
}
if (size >= 0 && result.size() != size) {
throwWrong("test13_getFileAttributes", "size=" + size, "size=" + result.size());
}
if (!isWithinMargin(currentTime, result.lastModifiedTime())) {
throwWrong("test13_getFileAttributes", "lastModifiedTime=" + currentTime,
"lastModifiedTime=" + result.lastModifiedTime());
}
if (!isWithinMargin(currentTime, result.creationTime())) {
throwWrong("test13_getFileAttributes", "creationTime=" + currentTime, "creationTime=" + result.creationTime());
}
if (!isWithinMargin(currentTime, result.lastAccessTime())) {
throwWrong("test13_getFileAttributes", "lastAccessTime=" + currentTime, "lastAccessTime=" + result.lastAccessTime());
}
System.err.println("File " + path + " has attributes: " + result.isReadable() + " " + result.isWritable() + " "
+ result.isExecutable() + " " + result.isSymbolicLink() + " " + result.isDirectory() + " "
+ result.isRegularFile() + " " + result.isHidden() + " " + result.isOther() + " " + result.lastAccessTime() + " "
+ result.lastModifiedTime());
}
/**
* Tests whether two times (in milliseconds) are within a mild margin of one another. The margin is large enough to be able to
* cope with servers in other timezones and similar, expected, sources of discrepancy between times.
*
* @param time1 the first time stamp, in milliseconds
* @param time2 the second time stamp, in milliseconds
* @return true if the two time stamps differ by less than the margin, false otherwise
*/
private boolean isWithinMargin(long time1, long time2) {
final int millisecondsPerSecond = 1000;
final int secondsPerHour = 3600;
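// 30 hours expressed in milliseconds; generous enough to absorb time zone differences between client and server.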
final long margin = 30 * secondsPerHour * millisecondsPerSecond;
return Math.abs(time1 - time2) < margin;
}
@org.junit.Test
public void test13_getAttributes() throws Exception {
long currentTime = System.currentTimeMillis();
// test with null
test13_getAttributes(null, false, -1, currentTime, true);
prepareTestDir("test13_getAttributes");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test13_getAttributes(file0, false, -1, currentTime, true);
// test with existing empty file
Path file1 = createTestFile(testDir, null);
test13_getAttributes(file1, false, 0, currentTime, false);
// test with existing non-empty file
Path file2 = createTestFile(testDir, new byte[] { 1, 2, 3 });
test13_getAttributes(file2, false, 3, currentTime, false);
// test with existing dir
Path dir0 = createTestDir(testDir);
test13_getAttributes(dir0, true, -1, currentTime, false);
// TODO: test with link!
deleteTestDir(dir0);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test13_getAttributes(testDir, false, -1, currentTime, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: setPosixFilePermissions
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / existing link (!) / closed filesystem
// Set<PosixFilePermission> null / empty set / [various correct set]
//
// Total combinations : N
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test14_setPosixFilePermissions(Path path, Set<PosixFilePermission> permissions, boolean mustFail)
throws Exception {
try {
files.setPosixFilePermissions(path, permissions);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test14_setPosixFilePermissions", e);
}
if (mustFail) {
throwExpected("test14_setPosixFilePermissions");
}
// Check result
FileAttributes attributes = files.getAttributes(path);
Set<PosixFilePermission> tmp = attributes.permissions();
if (!permissions.equals(tmp)) {
throwWrong("test14_setPosixFilePermissions", permissions, tmp);
}
}
@org.junit.Test
public void test14_setPosixFilePermissions() throws Exception {
if (!config.supportsPosixPermissions()) {
return;
}
// test with null, null
test14_setPosixFilePermissions(null, null, true);
prepareTestDir("test14_setPosixFilePermissions");
// test with existing file, null set
Path file0 = createTestFile(testDir, null);
test14_setPosixFilePermissions(file0, null, true);
// test with existing file, empty set
Set<PosixFilePermission> permissions = new HashSet<PosixFilePermission>();
test14_setPosixFilePermissions(file0, permissions, false);
// test with existing file, non-empty set
permissions.add(PosixFilePermission.OWNER_EXECUTE);
permissions.add(PosixFilePermission.OWNER_READ);
permissions.add(PosixFilePermission.OWNER_WRITE);
test14_setPosixFilePermissions(file0, permissions, false);
permissions.add(PosixFilePermission.OTHERS_READ);
test14_setPosixFilePermissions(file0, permissions, false);
permissions.add(PosixFilePermission.GROUP_READ);
test14_setPosixFilePermissions(file0, permissions, false);
// test with non-existing file
Path file1 = createNewTestFileName(testDir);
test14_setPosixFilePermissions(file1, permissions, true);
// test with existing dir
Path dir0 = createTestDir(testDir);
permissions.add(PosixFilePermission.OWNER_EXECUTE);
permissions.add(PosixFilePermission.OWNER_READ);
permissions.add(PosixFilePermission.OWNER_WRITE);
test14_setPosixFilePermissions(dir0, permissions, false);
permissions.add(PosixFilePermission.OTHERS_READ);
test14_setPosixFilePermissions(dir0, permissions, false);
permissions.add(PosixFilePermission.GROUP_READ);
test14_setPosixFilePermissions(dir0, permissions, false);
deleteTestDir(dir0);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test14_setPosixFilePermissions(file0, permissions, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newAttributesDirectoryStream
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestDirName], [createTestFile], newDirectoryStream,
// [deleteTestDir], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test15_newAttributesDirectoryStream(Path root, Set<PathAttributesPair> expected, boolean mustFail)
throws Exception {
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<PathAttributesPair> in = null;
try {
in = files.newAttributesDirectoryStream(root);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test15_newAttributesDirectoryStream", e);
}
if (mustFail) {
close(in);
throwExpected("test15_newAttributesDirectoryStream");
}
System.err.println("Comparing PathAttributesPairs:");
for (PathAttributesPair p : in) {
System.err.println("Got input " + p.path() + " " + p.attributes());
PathAttributesPair found = null;
for (PathAttributesPair x : tmp) {
System.err.println(" Comparing to " + x.path() + " " + x.attributes());
if (x.path().equals(p.path()) && x.attributes().equals(p.attributes())) {
System.err.println("Found!");
found = x;
break;
}
}
System.err.println(" Found = " + found);
if (found != null) {
tmp.remove(found);
} else {
System.err.println("NOT Found!");
close(in);
throwUnexpectedElement("test15_newAttributesDirectoryStream", p.path());
}
// if (tmp.contains(p)) {
// System.err.println("Found!");
// tmp.remove(p);
// } else {
// System.err.println("NOT Found!");
//
// close(in);
// throwUnexpectedElement("newAttributesDirectoryStream", p.path().getPath());
// }
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test15_newAttributesDirectoryStream", tmp);
}
}
@org.junit.Test
public void test15_newAttributesDirectoryStream() throws Exception {
// test with null
test15_newAttributesDirectoryStream(null, null, true);
prepareTestDir("test15_newAttrributesDirectoryStream");
// test with empty dir
test15_newAttributesDirectoryStream(testDir, null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test15_newAttributesDirectoryStream(dir0, null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test15_newAttributesDirectoryStream(file0, null, true);
// test with non-empty dir
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<PathAttributesPair> result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
result.add(new PathAttributesPairImplementation(file3, files.getAttributes(file3)));
test15_newAttributesDirectoryStream(testDir, result, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
result.add(new PathAttributesPairImplementation(dir1, files.getAttributes(dir1)));
test15_newAttributesDirectoryStream(testDir, result, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test15_newAttributesDirectoryStream(testDir, null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newAttributesDirectoryStream with filter
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// DirectoryStream.Filter null / filter returns all / filter returns none / filter selects one.
// Total combinations : 7 + 8
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test16_newAttributesDirectoryStream(Path root, DirectoryStream.Filter filter, Set<PathAttributesPair> expected,
boolean mustFail) throws Exception {
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<PathAttributesPair> in = null;
try {
in = files.newAttributesDirectoryStream(root, filter);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test16_newAttributesDirectoryDirectoryStream_with_filter", e);
}
if (mustFail) {
close(in);
throwExpected("test16_newAttributesDirectoryDirectoryStream_with_filter");
}
for (PathAttributesPair p : in) {
System.err.println("Got input " + p.path() + " " + p.attributes());
PathAttributesPair found = null;
for (PathAttributesPair x : tmp) {
System.err.println(" Comparing to " + x.path() + " " + x.attributes());
if (x.path().equals(p.path()) && x.attributes().equals(p.attributes())) {
System.err.println("Found!");
found = x;
break;
}
}
System.err.println(" Found = " + found);
if (found != null) {
tmp.remove(found);
} else {
System.err.println("NOT Found!");
close(in);
throwUnexpectedElement("test16_newAttributesDirectoryStream_with_filter", p.path());
}
// if (tmp.contains(p)) {
// System.err.println("Found!");
// tmp.remove(p);
// } else {
// System.err.println("NOT Found!");
//
// close(in);
// throwUnexpectedElement("newAttributesDirectoryStream", p.path().getPath());
// }
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test16_newAttributesDirectoryDirectoryStream_with_filter", tmp);
}
}
@org.junit.Test
public void test16_newAttributesDirectoryStream_with_filter() throws Exception {
// test with null
test16_newAttributesDirectoryStream(null, null, null, true);
prepareTestDir("test15_newAttributesDirectoryStream_with_filter");
// test with empty dir + null filter
test16_newAttributesDirectoryStream(testDir, null, null, true);
// test with empty dir + true filter
test16_newAttributesDirectoryStream(testDir, new AllTrue(), null, false);
// test with empty dir + false filter
test16_newAttributesDirectoryStream(testDir, new AllFalse(), null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test16_newAttributesDirectoryStream(dir0, new AllTrue(), null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test16_newAttributesDirectoryStream(file0, new AllTrue(), null, true);
// test with non-empty dir and allTrue
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<PathAttributesPair> result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
result.add(new PathAttributesPairImplementation(file3, files.getAttributes(file3)));
test16_newAttributesDirectoryStream(testDir, new AllTrue(), result, false);
// test with non-empty dir and allFalse
test16_newAttributesDirectoryStream(testDir, new AllFalse(), null, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
result.add(new PathAttributesPairImplementation(dir1, files.getAttributes(dir1)));
test16_newAttributesDirectoryStream(testDir, new AllTrue(), result, false);
// test with non-empty dir and select
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
test16_newAttributesDirectoryStream(testDir, new Select(tmp), result, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test16_newAttributesDirectoryStream(testDir, new AllTrue(), null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newInputStream
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
//
// Total combinations : 6
//
// Depends on:
private void test20_newInputStream(Path file, byte[] expected, boolean mustFail) throws Exception {
InputStream in = null;
try {
in = files.newInputStream(file);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test20_newInputStream", e);
}
if (mustFail) {
close(in);
throwExpected("test20_newInputStream");
}
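// Read the complete stream and compare its content against what was expected.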
byte[] data = readFully(in);
if (expected == null) {
if (data.length != 0) {
throwWrong("test20_newInputStream", "zero bytes", data.length + " bytes");
}
return;
}
if (expected.length != data.length) {
throwWrong("test20_newInputStream", expected.length + " bytes", data.length + " bytes");
}
if (!Arrays.equals(expected, data)) {
throwWrong("test20_newInputStream", Arrays.toString(expected), Arrays.toString(data));
}
}
@org.junit.Test
public void test20_newInputStream() throws Exception {
byte[] data = "Hello World".getBytes();
// test with null
test20_newInputStream(null, null, true);
prepareTestDir("test20_newInputStream");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test20_newInputStream(file0, null, true);
// test with existing empty file
Path file1 = createTestFile(testDir, null);
test20_newInputStream(file1, null, false);
// test with existing non-empty file
Path file2 = createTestFile(testDir, data);
test20_newInputStream(file2, data, false);
// test with existing dir
Path dir0 = createTestDir(testDir);
test20_newInputStream(dir0, null, true);
// cleanup
deleteTestFile(file1);
deleteTestFile(file2);
deleteTestDir(dir0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test20_newInputStream(file2, data, true);
}
}
@org.junit.Test
public void test20b_newInputStreamDoubleClose() throws Exception {
// See what happens when we close an in input stream twice and then reopen the stream. This failed
// on the SSH adaptor due to a bug in the sftp channel cache.
byte[] data = "Hello World".getBytes();
prepareTestDir("test20b_newInputStreamDoubleClose");
Path file = createTestFile(testDir, data);
InputStream in = null;
try {
in = files.newInputStream(file);
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
// should not fail
in.close();
} catch (Exception e) {
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
in.close();
} catch (Exception e) {
// closing an already-closed stream may throw; this is allowed
}
try {
in = files.newInputStream(file);
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
in.close();
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
deleteTestFile(file);
deleteTestDir(testDir);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newOutputStream
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// OpenOption null / CREATE / OPEN / OPEN_OR_CREATE / APPEND / TRUNCATE / READ / WRITE + combinations
//
// Total combinations : N
//
// Depends on:
private void test21_newOutputStream(Path path, OpenOption[] options, byte[] data, byte[] expected, boolean mustFail)
throws Exception {
OutputStream out = null;
try {
out = files.newOutputStream(path, options);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test21_newOutputStream", e);
}
if (mustFail) {
close(out);
throwExpected("test21_newOutputStream");
}
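// Write the data, then read the file back and compare it with the expected content.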
out.write(data);
close(out);
InputStream in = files.newInputStream(path);
byte[] tmp = readFully(in);
if (expected == null) {
if (tmp.length != 0) {
throwWrong("test21_newOutputStream", "zero bytes", tmp.length + " bytes");
}
return;
}
if (expected.length != tmp.length) {
throwWrong("test21_newOutputStream", expected.length + " bytes", tmp.length + " bytes");
}
if (!Arrays.equals(expected, tmp)) {
throwWrong("test21_newOutputStream", Arrays.toString(expected), Arrays.toString(tmp));
}
}
@org.junit.Test
public void test21_newOutputStream() throws Exception {
byte[] data = "Hello World".getBytes();
byte[] data2 = "Hello WorldHello World".getBytes();
// test with null
test21_newOutputStream(null, null, null, null, true);
prepareTestDir("test21_newOuputStream");
// test with existing file and null options
Path file0 = createTestFile(testDir, null);
test21_newOutputStream(file0, null, null, null, true);
// test with existing file and empty options
test21_newOutputStream(file0, new OpenOption[0], null, null, true);
// test with existing file and CREATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE }, null, null, true);
// test with existing file and OPEN option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN }, null, null, true);
// test with existing file and OPEN_OR_CREATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE }, null, null, true);
// test with existing file and APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.APPEND }, null, null, true);
// test with existing file and TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.TRUNCATE }, null, null, true);
// test with existing file and READ option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.READ }, null, null, true);
// test with existing file and WRITE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.WRITE }, null, null, true);
// test with existing file and CREATE + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
// test with existing file and CREATE + APPEND + READ option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND, OpenOption.READ }, null, null,
true);
// test with existing file and OPEN_OR_CREATE + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, data, data, false);
// test with existing file and OPEN + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, data, data2, false);
// test with existing file and OPEN + TRUNCATE + WRITE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.WRITE }, data, data,
false);
// test with existing file and CREATE + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.TRUNCATE }, null, null, true);
// test with existing file and OPEN_OR_CREATE + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.TRUNCATE }, data, data, false);
// test with existing file and OPEN + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE }, data, data, false);
deleteTestFile(file0);
// test with non-existing and CREATE + APPEND option
Path file1 = createNewTestFileName(testDir);
test21_newOutputStream(file1, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file1);
// test with non-existing and OPEN_OR_CREATE + APPEND option
Path file2 = createNewTestFileName(testDir);
test21_newOutputStream(file2, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file2);
// test with non-existing and OPEN + APPEND option
Path file3 = createNewTestFileName(testDir);
test21_newOutputStream(file3, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, null, null, true);
// test with existing dir
Path dir0 = createTestDir(testDir);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, null, null, true);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, null, null, true);
deleteTestDir(dir0);
// test with conflicting options
Path file4 = createTestFile(testDir, null);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.CREATE, OpenOption.OPEN, OpenOption.APPEND }, null, null,
true);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.APPEND }, null, null,
true);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND, OpenOption.READ }, null, null, true);
deleteTestFile(file4);
// test with non-existing file and CREATE + APPEND option
Path file5 = createNewTestFileName(testDir);
test21_newOutputStream(file5, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file5);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, null, null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newByteChannel
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// OpenOption null / CREATE / OPEN / OPEN_OR_CREATE / APPEND / TRUNCATE / READ / WRITE + combinations
//
// Total combinations : N
//
// Depends on:
// public void test22_newByteChannel(Path path, OpenOption [] options, byte [] toWrite, byte [] toRead,
// boolean mustFail) throws Exception {
//
// if (!config.supportsNewByteChannel()) {
// return;
// }
//
// SeekableByteChannel channel = null;
//
// try {
// channel = files.newByteChannel(path, options);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test22_newByteChannel", e);
// }
//
// if (mustFail) {
// close(channel);
// throwExpected("test22_newByteChannel");
// }
//
// if (toWrite != null) {
// channel.write(ByteBuffer.wrap(toWrite));
// }
//
// if (toRead != null) {
//
// channel.position(0);
//
// byte [] tmp = readFully(channel);
//
// if (toRead.length != tmp.length) {
// throwWrong("test22_newByteChannel", toRead.length + " bytes", tmp.length + " bytes");
// }
//
// if (!Arrays.equals(toRead, tmp)) {
// throwWrong("test22_newByteChannel", Arrays.toString(toRead), Arrays.toString(tmp));
// }
// }
//
// close(channel);
// }
// @org.junit.Test
// public void test21_newByteChannel() throws Exception {
//
// if (!config.supportsNewByteChannel()) {
// return;
// }
//
// byte [] data = "Hello World".getBytes();
// byte [] data2 = "Hello WorldHello World".getBytes();
//
//
//
// // test with null
// test22_newByteChannel(null, null, null, null, true);
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test22_newByteChannel");
//
// // test with existing file and null options
// Path file0 = createTestFile(testDir, null);
// test22_newByteChannel(file0, null, null, null, true);
//
// // test with existing file and empty options
// test22_newByteChannel(file0, new OpenOption[0], null, null, true);
//
// // test with existing file and CREATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.CREATE }, null, null, true);
//
// // test with existing file and OPEN option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN }, null, null, true);
//
// // test with existing file and OPEN_OR_CREATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN_OR_CREATE }, null, null, true);
//
// // test with existing file and APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.APPEND }, null, null, true);
//
// // test with existing file and TRUNCATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.TRUNCATE }, null, null, true);
//
// // test with existing file and READ option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.READ }, null, null, true);
//
// // test with existing file and WRITE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.WRITE }, null, null, true);
//
// // test with existing file and CREATE + APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
//
// // test with existing file and OPEN + READ + APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN, OpenOption.READ, OpenOption.APPEND }, null, null, true);
//
// // test with existing file and OPEN + READ option
// Path file1 = createTestFile(testDir, data);
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, data, false);
//
// // Test with existing file and OPEN + APPEND + READ + WRITE
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.WRITE, OpenOption.READ }, data, data, false);
//
// // Test with existing file and OPEN + APPEND + READ + WRITE
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.APPEND, OpenOption.WRITE, OpenOption.READ }, null, null, true);
//
// // test with existing file and OPEN + WRITE without APPEND option
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.WRITE }, null, null, true);
//
// // test with existing file and CREATE + WRITE + APPEND
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.CREATE, OpenOption.WRITE, OpenOption.APPEND }, null, null, true);
//
// deleteTestFile(file1);
//
// // test with non-existing file and CREATE + WRITE + APPEND
// Path file2 = createNewTestFileName(testDir);
// test22_newByteChannel(file2, new OpenOption [] { OpenOption.CREATE, OpenOption.WRITE, OpenOption.APPEND }, data, null, false);
// test22_newByteChannel(file2, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, data, false);
// deleteTestFile(file2);
//
// // test with non-existing file and OPEN + READ
// Path file3 = createNewTestFileName(testDir);
// test22_newByteChannel(file3, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, null, true);
//
// // test with non-existing file and OPEN_OR_CREATE + WRITE + READ + APPEND
// Path file4 = createNewTestFileName(testDir);
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.WRITE, OpenOption.READ }, data, data, false);
//
// // test with existing file and OPEN_OR_CREATE + WRITE + READ + APPEND
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.WRITE, OpenOption.APPEND }, data,
// null, false);
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN, OpenOption.READ, }, null, data2, false);
//
// deleteTestFile(file0);
// deleteTestFile(file4);
//
// deleteTestDir(testDir);
//
// if (config.supportsClose()) {
// // test with closed fs
// config.closeTestFileSystem(files,fs);
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND, OpenOption.READ },
// null, null, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: copy (synchronous)
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// CopyOptions null / CREATE / REPLACE / IGNORE / APPEND / RESUME / VERIFY / ASYNCHRONOUS
//
// Total combinations : N
//
// Depends on:
private void test23_copy(Path source, Path target, CopyOption[] options, byte[] expected, boolean mustFail) throws Exception {
Copy copy;
try {
copy = files.copy(source, target, options);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test23_copy", e);
}
if (mustFail) {
throwExpected("test23_copy");
}
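// When the copy is expected to succeed and expected content is given, read back the target and compare.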
if (expected != null) {
byte[] tmp = readFully(files.newInputStream(target));
if (!Arrays.equals(expected, tmp)) {
throwWrong("test23_copy", Arrays.toString(expected), Arrays.toString(tmp));
}
}
}
@org.junit.Test
public void test23_copy() throws Exception {
byte[] data = "Hello World!".getBytes();
byte[] data2 = "Goodbye World!".getBytes();
byte[] data3 = "Hello World!Goodbye World!".getBytes();
byte[] data4 = "Hello World!Hello World!".getBytes();
byte[] data5 = "Hello World!Hello World!Hello World!".getBytes();
// test with null
test23_copy(null, null, null, null, true);
prepareTestDir("test23_copy");
Path file0 = createTestFile(testDir, data);
// test without target
test23_copy(file0, null, new CopyOption[] { CopyOption.CREATE }, null, true);
// test without source
test23_copy(null, file0, new CopyOption[] { CopyOption.CREATE }, null, true);
Path file1 = createNewTestFileName(testDir);
Path file2 = createNewTestFileName(testDir);
Path file3 = createNewTestFileName(testDir);
Path file4 = createTestFile(testDir, data2);
Path file5 = createTestFile(testDir, data3);
Path dir0 = createTestDir(testDir);
Path dir1 = createNewTestDirName(testDir);
Path file6 = createNewTestFileName(dir1);
// test copy with non-existing source
test23_copy(file1, file2, new CopyOption[0], null, true);
// test copy with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy using conflicting options should fail
test23_copy(file0, file1, new CopyOption[] { CopyOption.IGNORE, CopyOption.CREATE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.IGNORE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.REPLACE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.RESUME }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.APPEND }, null, true);
// test copy with non-existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE }, data, false);
test23_copy(file0, file2, new CopyOption[] { CopyOption.CREATE, CopyOption.CREATE }, data, false);
// test copy with non-existing target with non-existing parent
test23_copy(file0, file6, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy with existing target
test23_copy(file0, file1, new CopyOption[0], null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy with same target as source
test23_copy(file0, file0, new CopyOption[] { CopyOption.CREATE }, data, false);
// test ignore with existing target
test23_copy(file4, file1, new CopyOption[] { CopyOption.IGNORE }, data, false);
test23_copy(file4, file1, new CopyOption[] { CopyOption.IGNORE, CopyOption.IGNORE }, data, false);
// test resume with existing target
test23_copy(file4, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
test23_copy(file1, file5, new CopyOption[] { CopyOption.RESUME }, null, true);
test23_copy(file5, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, data3, false);
test23_copy(file5, file1, new CopyOption[] { CopyOption.RESUME }, data3, false);
test23_copy(file5, file2, new CopyOption[] { CopyOption.RESUME, CopyOption.RESUME }, data3, false);
test23_copy(file4, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with non-existing source
test23_copy(file3, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with non-existing target
test23_copy(file5, file3, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with dir target
test23_copy(file5, dir0, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with same dir and target
test23_copy(file5, file5, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, data3, false);
// test replace with existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE }, data, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.REPLACE }, data, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.VERIFY }, null, true);
// test append with existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.APPEND }, data4, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.APPEND, CopyOption.APPEND }, data5, false);
// test append with non-existing source
test23_copy(file3, file1, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with non-existing target
test23_copy(file0, file3, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with dir target
test23_copy(file0, dir0, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with source equals target
test23_copy(file0, file0, new CopyOption[] { CopyOption.APPEND }, null, true);
// test with source equals target and empty option
test23_copy(file0, file0, new CopyOption[] { null }, null, true);
deleteTestDir(dir0);
deleteTestFile(file5);
deleteTestFile(file4);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
closeTestFS();
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: copy (asynchronous)
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// CopyOptions null / CREATE / REPLACE / IGNORE / APPEND / RESUME / VERIFY / ASYNCHRONOUS
//
// Total combinations : N
//
// Depends on:
@org.junit.Test
public void test24_copy_async() throws Exception {
byte[] data = "Hello World!".getBytes();
prepareTestDir("test24_copy_async");
Path file0 = createTestFile(testDir, data);
Path file1 = createNewTestFileName(testDir);
// Test the async copy
Copy copy = files.copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.ASYNCHRONOUS });
CopyStatus status = files.getCopyStatus(copy);
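// Poll the status of the background copy until it reports completion.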
while (!status.isDone()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// ignored
}
status = files.getCopyStatus(copy);
}
// Test the cancel
copy = files.copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.ASYNCHRONOUS });
status = files.cancelCopy(copy);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: getLocalCWD and getLocalHomeFileSystem
//
// Possible parameters: (none)
//
// Depends on: Utils.getLocalCWD, Utils.getLocalHome
@org.junit.Test
public void test25_getLocalCWD() throws Exception {
if (config.supportsLocalCWD()) {
try {
Utils.getLocalCWD(files);
} catch (Exception e) {
throwUnexpected("test25_getLocalCWD", e);
}
}
}
@org.junit.Test
public void test26_getLocalHomeFileSystem() throws Exception {
if (config.supportsLocalHome()) {
try {
Utils.getLocalHome(files);
} catch (Exception e) {
throwUnexpected("test26_getLocalHomeFileSystem", e);
}
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: move
//
// Possible parameters:
//
// source null / non-existing file / existing file / existing dir
// target null / non-existing file / existing file / non-existing parent dir / existing dir
// + closed filesystem
//
// Total combinations :
//
// Depends on:
private void test27_move(Path source, Path target, boolean mustFail) throws Exception {
try {
files.move(source, target);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test27_move", e);
}
if (mustFail) {
throwExpected("test27_move");
}
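// Compare the normalized relative paths to detect a move of a file onto itself.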
RelativePath sourceName = source.getRelativePath().normalize();
RelativePath targetName = target.getRelativePath().normalize();
if (sourceName.equals(targetName)) {
// source == target, so the move did nothing.
return;
}
// make sure the source no longer exists, and the target does exist
if (files.exists(source)) {
throwWrong("test27_move", "no source file", "source file");
}
if (!files.exists(target)) {
throwWrong("test27_move", "target file", "no target file");
}
}
@org.junit.Test
public void test27_move() throws Exception {
test27_move(null, null, true);
prepareTestDir("test27_move");
// test with non-existing source
Path file0 = createNewTestFileName(testDir);
Path file1 = createNewTestFileName(testDir);
test27_move(file0, file1, true);
// test with existing source, non-existing target
Path file2 = createTestFile(testDir, null);
test27_move(file2, file0, false);
// test with existing source and target
Path file3 = createTestFile(testDir, null);
test27_move(file3, file0, true);
// test file existing source, and target with non-existing parent
Path dir0 = createNewTestDirName(testDir);
Path file4 = createNewTestFileName(dir0);
test27_move(file0, file4, true);
// test with source equals target
test27_move(file0, file0, false);
deleteTestFile(file0);
deleteTestFile(file3);
// test with existing dir
Path dir1 = createTestDir(testDir);
test27_move(dir1, file1, false);
deleteTestDir(file1);
deleteTestDir(testDir);
closeTestFS();
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: readSymbolicLink
//
// Possible parameters:
//
// link null / non-existing file / existing file / existing dir / existing link / broken link / closed filesystem
//
// Total combinations : 7
//
// Depends on:
private void test28_readSymbolicLink(Path link, Path expected, boolean mustFail) throws Exception {
Path target = null;
try {
target = files.readSymbolicLink(link);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test28_readSymboliclink", e);
}
if (mustFail) {
throwExpected("test28_readSymbolicLink");
}
// make sure the target is what was expected
if (expected != null && !target.equals(expected)) {
throwWrong("test28_readSymbolicLink", expected, target);
}
}
@org.junit.Test
public void test28_readSymbolicLink() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
// test with null
test28_readSymbolicLink(null, null, true);
prepareTestDir("test28_readSybmolicLink");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test28_readSymbolicLink(file0, null, true);
// test with existing file
Path file1 = createTestFile(testDir, null);
test28_readSymbolicLink(file1, null, true);
deleteTestFile(file1);
// test with existing dir
Path dir0 = createTestDir(testDir);
test28_readSymbolicLink(dir0, null, true);
deleteTestDir(dir0);
deleteTestDir(testDir);
closeTestFS();
}
@org.junit.Test
public void test29_readSymbolicLink() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir with is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
Path file2 = resolve(root, "file2"); // does not exist
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
// link0 should point to file0
test28_readSymbolicLink(link0, file0, false);
// link1 should point to file1
test28_readSymbolicLink(link1, file1, false);
// link2 should point to the non-existing file2; reading the link itself still succeeds
test28_readSymbolicLink(link2, file2, false);
// link3 should point to link0 which points to file0
test28_readSymbolicLink(link3, link0, false);
// link4 should point to link2 which points to file2
test28_readSymbolicLink(link4, link2, false);
// link5 should point to link6 which points to link5
test28_readSymbolicLink(link5, link6, false);
// link6 should point to link5 which points to link6
test28_readSymbolicLink(link6, link5, false);
}
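// Illustrative sketch (an assumption, not executed by these tests): the external links directory
// used above could be prepared on the test server roughly as follows:
//
// mkdir -p <workdir>/xenon_test/links && cd <workdir>/xenon_test/links
// echo "some text" > file0 ; touch file1 (file2 is deliberately not created)
// ln -s file0 link0 ; ln -s file1 link1 ; ln -s file2 link2
// ln -s link0 link3 ; ln -s link2 link4
// ln -s link6 link5 ; ln -s link5 link6 (circular pair)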
// @org.junit.Test
// public void test30_isSymbolicLink() throws Exception {
//
//
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
//
// // Use external test dir with is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
// Path root = fs.getEntryPath().resolve(new RelativePath("xenon_test/links"));
//
// if (!files.exists(root)) {
// throw new Exception("Cannot find symbolic link test dir at " + root.getPath());
// }
//
// // prepare the test files
// boolean v = files.isSymbolicLink(root.resolve(new RelativePath("file0")));
// assertFalse(v);
//
// v = files.isSymbolicLink(root.resolve(new RelativePath("link0")));
// assertTrue(v);
//
// v = files.isSymbolicLink(root.resolve(new RelativePath("file2")));
// assertFalse(v);
//
//
// }
@org.junit.Test
public void test31_newDirectoryStreamWithBrokenLinks() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir which is assumed to be at fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(link0);
tmp.add(link1);
tmp.add(link2);
tmp.add(link3);
tmp.add(link4);
tmp.add(link5);
tmp.add(link6);
test11_newDirectoryStream(root, tmp, false);
}
@org.junit.Test
public void test32_newAttributesDirectoryStreamWithBrokenLinks() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir which is assumed to be at fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
tmp.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
tmp.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
tmp.add(new PathAttributesPairImplementation(link0, files.getAttributes(link0)));
tmp.add(new PathAttributesPairImplementation(link1, files.getAttributes(link1)));
tmp.add(new PathAttributesPairImplementation(link2, files.getAttributes(link2)));
tmp.add(new PathAttributesPairImplementation(link3, files.getAttributes(link3)));
tmp.add(new PathAttributesPairImplementation(link4, files.getAttributes(link4)));
tmp.add(new PathAttributesPairImplementation(link5, files.getAttributes(link5)));
tmp.add(new PathAttributesPairImplementation(link6, files.getAttributes(link6)));
test15_newAttributesDirectoryStream(root, tmp, false);
}
/*
public Path readSymbolicLink(Path link) throws XenonException;
public boolean isSymbolicLink(Path path) throws XenonException;
*/
@org.junit.Test
public void test33_multipleFileSystemsOpenSimultaneously() throws Exception {
// Open two file systems. They should both be open afterwards.
FileSystem fs0 = files.newFileSystem(config.getScheme(), config.getCorrectLocation(),
config.getDefaultCredential(credentials), null);
FileSystem fs1 = files.newFileSystem(config.getScheme(), config.getCorrectLocation(),
config.getDefaultCredential(credentials), null);
assert (files.isOpen(fs0));
assert (files.isOpen(fs1));
// Close them both. We should get no exceptions.
files.close(fs0);
files.close(fs1);
}
}
| test/src/nl/esciencecenter/xenon/adaptors/GenericFileAdaptorTestParent.java | /*
* Copyright 2013 Netherlands eScience Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.esciencecenter.xenon.adaptors;
import java.io.Closeable;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import nl.esciencecenter.xenon.Xenon;
import nl.esciencecenter.xenon.XenonException;
import nl.esciencecenter.xenon.XenonFactory;
import nl.esciencecenter.xenon.credentials.Credential;
import nl.esciencecenter.xenon.credentials.Credentials;
import nl.esciencecenter.xenon.engine.files.PathAttributesPairImplementation;
import nl.esciencecenter.xenon.files.Copy;
import nl.esciencecenter.xenon.files.CopyOption;
import nl.esciencecenter.xenon.files.CopyStatus;
import nl.esciencecenter.xenon.files.DirectoryStream;
import nl.esciencecenter.xenon.files.FileAttributes;
import nl.esciencecenter.xenon.files.FileSystem;
import nl.esciencecenter.xenon.files.Files;
import nl.esciencecenter.xenon.files.OpenOption;
import nl.esciencecenter.xenon.files.Path;
import nl.esciencecenter.xenon.files.PathAttributesPair;
import nl.esciencecenter.xenon.files.PosixFilePermission;
import nl.esciencecenter.xenon.files.RelativePath;
import nl.esciencecenter.xenon.util.Utils;
import org.junit.After;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runners.MethodSorters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Jason Maassen <[email protected]>
*
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public abstract class GenericFileAdaptorTestParent {
private static final Logger logger = LoggerFactory.getLogger(GenericFileAdaptorTestParent.class);
protected static String TEST_ROOT;
public static FileTestConfig config;
protected Xenon xenon;
protected Files files;
protected Credentials credentials;
protected Path testDir;
private long counter = 0;
@Rule
public TestWatcher watcher = new TestWatcher() {
@Override
public void starting(Description description) {
logger.info("Running test {}", description.getMethodName());
}
@Override
public void failed(Throwable reason, Description description) {
logger.info("Test {} failed due to exception", description.getMethodName(), reason);
}
@Override
public void succeeded(Description description) {
logger.info("Test {} succeeded", description.getMethodName());
}
@Override
public void skipped(AssumptionViolatedException reason, Description description) {
logger.info("Test {} skipped due to failed assumption", description.getMethodName(), reason);
}
};
// MUST be invoked by a @BeforeClass method of the subclass!
public static void prepareClass(FileTestConfig testConfig) throws Exception {
config = testConfig;
TEST_ROOT = "xenon_test_" + config.getAdaptorName() + "_" + System.currentTimeMillis();
}
// MUST be invoked by a @AfterClass method of the subclass!
public static void cleanupClass() throws Exception {
System.err.println("GenericFileAdaptorTest.cleanupClass() attempting to remove: " + TEST_ROOT);
Xenon xenon = XenonFactory.newXenon(null);
Files files = xenon.files();
Credentials credentials = xenon.credentials();
Path p = config.getWorkingDir(files, credentials);
Path root = files.newPath(p.getFileSystem(), p.getRelativePath().resolve(TEST_ROOT));
if (files.exists(root)) {
files.delete(root);
}
XenonFactory.endXenon(xenon);
}
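// Resolves the given path components against root, returning a new Path on the same filesystem.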
public Path resolve(Path root, String... path) throws XenonException {
return files.newPath(root.getFileSystem(), root.getRelativePath().resolve(new RelativePath(path)));
}
@Before
public void prepare() throws Exception {
xenon = XenonFactory.newXenon(null);
files = xenon.files();
credentials = xenon.credentials();
}
@After
public void cleanup() throws Exception {
XenonFactory.endXenon(xenon);
files = null;
xenon = null;
}
// Various util functions ------------------------------------------------------------
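// Simple DirectoryStream.Filter implementations used by the filtered newDirectoryStream and
// newAttributesDirectoryStream tests: accept everything, accept nothing, or accept only a given set of paths.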
class AllTrue implements DirectoryStream.Filter {
@Override
public boolean accept(Path entry) {
return true;
}
}
class AllFalse implements DirectoryStream.Filter {
@Override
public boolean accept(Path entry) {
return false;
}
}
class Select implements DirectoryStream.Filter {
private Set<Path> set;
public Select(Set<Path> set) {
this.set = set;
}
@Override
public boolean accept(Path entry) {
return set.contains(entry);
}
}
private void throwUnexpected(String name, Exception e) throws Exception {
throw new Exception(name + " throws unexpected Exception!", e);
}
private void throwExpected(String name) throws Exception {
throw new Exception(name + " did NOT throw Exception which was expected!");
}
private void throwWrong(String name, Object expected, Object result) throws Exception {
throw new Exception(name + " produced wrong result! Expected: " + expected + " but got: " + result);
}
private void throwUnexpectedElement(String name, Object element) throws Exception {
throw new Exception(name + " produced unexpected element: " + element);
}
// private void throwMissingElement(String name, String element) throws Exception {
//
// throw new Exception(name + " did NOT produce element: " + element);
// }
private void throwMissingElements(String name, Collection<?> elements) throws Exception {
throw new Exception(name + " did NOT produce elements: " + elements);
}
private void close(Closeable c) {
if (c == null) {
return;
}
try {
c.close();
} catch (Exception e) {
// ignore
}
}
// Depends on: Path.resolve, RelativePath, exists
private Path createNewTestDirName(Path root) throws Exception {
Path dir = resolve(root, "dir" + counter);
counter++;
if (files.exists(dir)) {
throw new Exception("Generated test dir already exists! " + dir);
}
return dir;
}
// Depends on: [createNewTestDirName], createDirectory, exists
private Path createTestDir(Path root) throws Exception {
Path dir = createNewTestDirName(root);
files.createDirectory(dir);
if (!files.exists(dir)) {
throw new Exception("Failed to generate test dir! " + dir);
}
return dir;
}
// Depends on: [createTestDir]
protected void prepareTestDir(String testName) throws Exception {
Path p = config.getWorkingDir(files, credentials);
if (testDir != null) {
return;
}
testDir = resolve(p, TEST_ROOT, testName);
if (!files.exists(testDir)) {
files.createDirectories(testDir);
}
}
// Depends on: [createTestDir]
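// Closes the filesystem that the current test dir lives on (if any) and resets testDir,
// so the next test starts from a clean state.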
private void closeTestFS() throws Exception {
if (testDir == null) {
return;
}
files.close(testDir.getFileSystem());
testDir = null;
}
// Depends on: Path.resolve, RelativePath, exists
private Path createNewTestFileName(Path root) throws Exception {
Path file = resolve(root, "file" + counter);
counter++;
if (files.exists(file)) {
throw new Exception("Generated NEW test file already exists! " + file);
}
return file;
}
// Depends on: newOutputStream
private void writeData(Path testFile, byte[] data) throws Exception {
OutputStream out = files.newOutputStream(testFile, OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.WRITE);
if (data != null) {
out.write(data);
}
out.close();
}
// Depends on: [createNewTestFileName], createFile, [writeData]
protected Path createTestFile(Path root, byte[] data) throws Exception {
Path file = createNewTestFileName(root);
files.createFile(file);
if (data != null && data.length > 0) {
writeData(file, data);
}
return file;
}
// Depends on: exists, isDirectory, delete
private void deleteTestFile(Path file) throws Exception {
if (!files.exists(file)) {
throw new Exception("Cannot delete non-existing file: " + file);
}
FileAttributes att = files.getAttributes(file);
if (att.isDirectory()) {
throw new Exception("Cannot delete directory: " + file);
}
files.delete(file);
}
// Depends on: exists, isDirectory, delete
protected void deleteTestDir(Path dir) throws Exception {
if (!files.exists(dir)) {
throw new Exception("Cannot delete non-existing dir: " + dir);
}
FileAttributes att = files.getAttributes(dir);
if (!att.isDirectory()) {
throw new Exception("Cannot delete file: " + dir);
}
files.delete(dir);
}
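// Reads the complete stream into a byte array, doubling the buffer whenever it fills up.
// Used to verify the exact contents of test files in the read/write tests below.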
private byte[] readFully(InputStream in) throws Exception {
byte[] buffer = new byte[1024];
int offset = 0;
int read = in.read(buffer, offset, buffer.length - offset);
while (read != -1) {
offset += read;
if (offset == buffer.length) {
buffer = Arrays.copyOf(buffer, buffer.length * 2);
}
read = in.read(buffer, offset, buffer.length - offset);
}
close(in);
return Arrays.copyOf(buffer, offset);
}
// private byte [] readFully(SeekableByteChannel channel) throws Exception {
//
// ByteBuffer buffer = ByteBuffer.allocate(1024);
//
// int read = channel.read(buffer);
//
// while (read != -1) {
//
// System.err.println("READ from channel " + read);
//
// if (buffer.position() == buffer.limit()) {
// ByteBuffer tmp = ByteBuffer.allocate(buffer.limit()*2);
// buffer.flip();
// tmp.put(buffer);
// buffer = tmp;
// }
//
// read = channel.read(buffer);
// }
//
// close(channel);
//
// buffer.flip();
// byte [] tmp = new byte[buffer.remaining()];
// buffer.get(tmp);
//
// System.err.println("Returning byte[" + tmp.length + "]");
//
// return tmp;
// }
// The tests start here.
// ---------------------------------------------------------------------------------------------------------------------------
// TEST newFileSystem
//
// Possible parameters:
// URI - correct URI / wrong user / wrong location / wrong path
// Credentials - default / null / value
// Properties - null / empty / set right / set wrong
//
// Total combinations: 4 + 2 + 3 = 9
//
// Depends on: newFileSystem, close
private void test00_newFileSystem(String scheme, String location, Credential c, Map<String, String> p, boolean mustFail)
throws Exception {
try {
FileSystem fs = files.newFileSystem(scheme, location, c, p);
files.close(fs);
} catch (Exception e) {
if (mustFail) {
// exception was expected.
return;
}
// exception was not expected
throwUnexpected("test00_newFileSystem", e);
}
if (mustFail) {
// expected an exception!
throwExpected("test00_newFileSystem");
}
}
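// For reference, the basic pattern exercised by the tests below (a sketch that only uses calls
// already present in this class; it is not itself a test):
//
// FileSystem fs = files.newFileSystem(config.getScheme(), config.getCorrectLocation(),
// config.getDefaultCredential(credentials), null);
// try {
// // ... use fs via the Files interface ...
// } finally {
// files.close(fs);
// }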
@org.junit.Test
public void test00_newFileSystem_nullUriAndCredentials_shouldThrow() throws Exception {
test00_newFileSystem(null, null, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nullCredentials_shouldThrow() throws Exception {
test00_newFileSystem(config.getScheme(), null, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nullProperties_throwConditionally() throws Exception {
// test with correct URI without credential and without properties
boolean allowNull = config.supportNullCredential();
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), null, null, !allowNull);
}
@org.junit.Test
public void test00_newFileSystem_correctArguments_noThrow() throws Exception {
// test with correct scheme, correct location and default credential
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials), null,
false);
}
@org.junit.Test
public void test00_newFileSystem_wrongLocation_throw() throws Exception {
// test with correct scheme and wrong location
test00_newFileSystem(config.getScheme(), config.getWrongLocation(), config.getDefaultCredential(credentials), null, true);
}
@org.junit.Test
public void test00_newFileSystem_userInUriIfSupported_noThrow() throws Exception {
if (!config.supportUserInUri()) {
return;
}
String uriWithUsername = config.getCorrectLocationWithUser();
test00_newFileSystem(config.getScheme(), uriWithUsername, null, null, false);
}
@org.junit.Test
public void test00_newFileSystem_wrongUserInUriIfSupported_shouldThrow() throws Exception {
if (!config.supportUserInUri()) {
return;
}
String uriWithWrongUser = config.getCorrectLocationWithWrongUser();
test00_newFileSystem(config.getScheme(), uriWithWrongUser, null, null, true);
}
@org.junit.Test
public void test00_newFileSystem_nonDefaultCredentialIfSupported_noThrow() throws Exception {
Credential nonDefaultCredential = config.getNonDefaultCredential(credentials);
if (config.supportNonDefaultCredential()) {
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), nonDefaultCredential, null, false);
}
}
@org.junit.Test
public void test00_newFileSystem_emptyProperties_noThrow() throws Exception {
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
new HashMap<String, String>(), false);
}
@org.junit.Test
public void test00_newFileSystem_correctProperties_noThrow() throws Exception {
if (config.supportsProperties()) {
test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
config.getCorrectProperties(), false);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST isOpen
//
// Possible parameters:
//
// FileSystem - null / open FS / closed FS
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], close, isOpen
private void test01_isOpen(FileSystem fs, boolean expected, boolean mustFail) throws Exception {
boolean result = false;
try {
result = files.isOpen(fs);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test01_isOpen", e);
}
if (mustFail) {
throwExpected("test01_isOpen");
}
if (result != expected) {
throwWrong("test01_isOpen", expected, result);
}
}
@org.junit.Test
public void test01_isOpen() throws Exception {
// test with null filesystem
test01_isOpen(null, false, true);
FileSystem fs = config.getTestFileSystem(files, credentials);
// test with correct open filesystem
test01_isOpen(fs, true, false);
if (config.supportsClose()) {
files.close(fs);
// test with correct closed filesystem
test01_isOpen(fs, false, false);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST close
//
// Possible parameters:
//
// FileSystem - null / open FS / closed FS
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], close
private void test02_close(FileSystem fs, boolean mustFail) throws Exception {
try {
files.close(fs);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test02_close", e);
}
if (mustFail) {
throwExpected("test02_close");
}
}
@org.junit.Test
public void test02_close() throws Exception {
// test with null filesystem
test02_close(null, true);
if (config.supportsClose()) {
FileSystem fs = config.getTestFileSystem(files, credentials);
// test with correct open filesystem
test02_close(fs, false);
// test with correct closed filesystem
test02_close(fs, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST newPath
//
// Possible parameters:
//
// FileSystem - null / correct
// RelativePath - null / empty / value
//
// Total combinations : 2
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), Path.getPath(), RelativePath, close
private void test03_newPath(FileSystem fs, RelativePath path, String expected, boolean mustFail) throws Exception {
String result = null;
try {
result = files.newPath(fs, path).getRelativePath().getAbsolutePath();
} catch (Exception e) {
if (mustFail) {
// expected exception
return;
}
throwUnexpected("test03_newPath", e);
}
if (mustFail) {
throwExpected("test03_newPath");
}
if (!result.equals(expected)) {
throwWrong("test03_newPath", expected, result);
}
}
@org.junit.Test
public void test03_newPath() throws Exception {
FileSystem fs = config.getTestFileSystem(files, credentials);
String root = "/";
// test with null filesystem and null relative path
test03_newPath(null, null, null, true);
// test with correct filesystem and null relative path
test03_newPath(fs, null, null, true);
// test with correct filesystem and empty relative path
test03_newPath(fs, new RelativePath(), root, false);
// test with correct filesystem and relativepath with value
test03_newPath(fs, new RelativePath("test"), root + "test", false);
files.close(fs);
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createDirectory
//
// Possible parameters:
//
// Path null / non-existing dir / existing dir / existing file / non-existing parent / closed filesystem
//
// Total combinations : 5
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], [createTestFile],
// createDirectory, [deleteTestDir], [deleteTestFile], [closeTestFileSystem]
private void test04_createDirectory(Path path, boolean mustFail) throws Exception {
try {
files.createDirectory(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test04_createDirectory", e);
}
if (mustFail) {
throwExpected("test04_createDirectory");
}
}
@org.junit.Test
public void test04_createDirectory() throws Exception {
// test with null
test04_createDirectory(null, true);
Path cwd = config.getWorkingDir(files, credentials);
Path root = resolve(cwd, TEST_ROOT);
// test with non-existing dir
test04_createDirectory(root, false);
// test with existing dir
test04_createDirectory(root, true);
// test with existing file
Path file0 = createTestFile(root, null);
test04_createDirectory(file0, true);
deleteTestFile(file0);
// test with non-existent parent dir
Path parent = createNewTestDirName(root);
Path dir0 = createNewTestDirName(parent);
test04_createDirectory(dir0, true);
// cleanup
deleteTestDir(root);
// close test FS
files.close(cwd.getFileSystem());
if (config.supportsClose()) {
// test with closed fs
test04_createDirectory(root, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createDirectories
//
// Possible parameters:
//
// Path null / non-existing dir / existing dir / dir with existing parents / dir with non existing parents /
// dir where last parent is file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test05_createDirectories(Path path, boolean mustFail) throws Exception {
try {
files.createDirectories(path);
assert (files.exists(path));
FileAttributes att = files.getAttributes(path);
assert (att.isDirectory());
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test05_createDirectories", e);
}
if (mustFail) {
throwExpected("createDirectory");
}
}
@org.junit.Test
public void test05_createDirectories() throws Exception {
// test with null
test05_createDirectories(null, true);
Path cwd = config.getWorkingDir(files, credentials);
Path root = resolve(cwd, TEST_ROOT, "test05_createDirectories");
// test with non-existing dir
test05_createDirectories(root, false);
// test with existing dir
test05_createDirectories(root, true);
// dir with existing parents
Path dir0 = createNewTestDirName(root);
test05_createDirectories(dir0, false);
deleteTestDir(dir0);
// dir with non-existing parents
Path dir1 = createNewTestDirName(dir0);
test05_createDirectories(dir1, false);
// dir where last parent is file
Path file0 = createTestFile(dir0, null);
Path dir2 = createNewTestDirName(file0);
test05_createDirectories(dir2, true);
// cleanup
deleteTestDir(dir1);
deleteTestFile(file0);
deleteTestDir(dir0);
deleteTestDir(root);
// close test FS
files.close(cwd.getFileSystem());
if (config.supportsClose()) {
// test with closed fs
test05_createDirectories(root, true);
}
}
// From this point on we can use prepareTestDir
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: isDirectory
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / closed filesystem
//
// Total combinations : 4
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile]
// [closeTestFileSystem]
//
// private void test06_isDirectory(Path path, boolean expected, boolean mustFail) throws Exception {
//
// boolean result = false;
//
// try {
// result = files.isDirectory(path);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test06_isDirectory", e);
// }
//
// if (mustFail) {
// throwExpected("test06_isDirectory");
// }
//
// if (result != expected) {
// throwWrong("test06_isDirectory", "" + expected, "" + result);
// }
// }
//
// @org.junit.Test
// public void test06_isDirectory() throws Exception {
//
//
//
// // prepare
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test06_isDirectory");
//
// // test with null
// test06_isDirectory(null, false, true);
//
// // test with non-existing file
// Path file0 = createNewTestFileName(testDir);
// test06_isDirectory(file0, false, false);
//
// // test with existing file
// Path file1 = createTestFile(testDir, null);
// test06_isDirectory(file1, false, false);
// deleteTestFile(file1);
//
// // test with existing dir
// test06_isDirectory(testDir, true, false);
//
// // cleanup
// deleteTestDir(testDir);
// config.closeTestFileSystem(files, fs);
//
// if (config.supportsClose()) {
// // test with closed filesystem
// test06_isDirectory(testDir, true, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: createFile
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / non-existing parent / closed filesystem
//
// Total combinations : 6
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], createFile, delete, [deleteTestDir]
// [closeTestFileSystem]
private void test07_createFile(Path path, boolean mustFail) throws Exception {
try {
files.createFile(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test07_createFile", e);
}
if (mustFail) {
throwExpected("test07_createFile");
}
}
@org.junit.Test
public void test07_createFile() throws Exception {
// prepare
prepareTestDir("test07_createFile");
// test with null
test07_createFile(null, true);
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test07_createFile(file0, false);
// test with existing file
test07_createFile(file0, true);
// test with existing dir
test07_createFile(testDir, true);
Path tmp = createNewTestDirName(testDir);
Path file1 = createNewTestFileName(tmp);
// test with non-existing parent
test07_createFile(file1, true);
// cleanup
files.delete(file0);
deleteTestDir(testDir);
// close test FS
closeTestFS();
if (config.supportsClose()) {
// test with closed filesystem
test07_createFile(file0, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: exists
//
// Possible parameters:
//
// Path null / non-existing file / existing file
//
// Total combinations : 3
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile],
// [closeTestFileSystem], exists
private void test08_exists(Path path, boolean expected, boolean mustFail) throws Exception {
boolean result = false;
try {
result = files.exists(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test08_exists", e);
}
if (mustFail) {
throwExpected("test08_exists");
}
if (result != expected) {
throwWrong("test08_exists", expected, result);
}
}
@org.junit.Test
public void test08_exists() throws Exception {
// prepare
prepareTestDir("test08_exists");
// test with null
test08_exists(null, false, true);
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test08_exists(file0, false, false);
// test with existing file
Path file1 = createTestFile(testDir, null);
test08_exists(file1, true, false);
deleteTestFile(file1);
// cleanup
deleteTestDir(testDir);
// close test FS
closeTestFS();
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: delete
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing empty dir / existing non-empty dir /
// existing non-writable file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], delete, [deleteTestFile], [deleteTestDir]
// [closeTestFileSystem]
private void test09_delete(Path path, boolean mustFail) throws Exception {
try {
files.delete(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test09_delete", e);
}
if (files.exists(path)) {
throwWrong("test09_delete", "no file", "a file");
}
if (mustFail) {
throwExpected("test09_delete");
}
}
@org.junit.Test
public void test09_delete() throws Exception {
// test with null
test09_delete(null, true);
prepareTestDir("test09_delete");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test09_delete(file0, true);
// test with existing file
Path file1 = createTestFile(testDir, null);
test09_delete(file1, false);
// test with existing empty dir
Path dir0 = createTestDir(testDir);
test09_delete(dir0, false);
// test with existing non-empty dir
Path dir1 = createTestDir(testDir);
Path file2 = createTestFile(dir1, null);
test09_delete(dir1, true);
// test with non-writable file
// Path file3 = createTestFile(testDir, null);
// files.setPosixFilePermissions(file3, new HashSet<PosixFilePermission>());
// System.err.println("Attempting to delete: " + file3.getPath() + " " + files.getAttributes(file3));
// test09_delete(file3, true);
// cleanup
deleteTestFile(file2);
deleteTestDir(dir1);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test09_delete(testDir, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: size
//
// Possible parameters:
//
// Path null / non-existing file / existing file size 0 / existing file size N / file from closed FS
//
// Total combinations : 5
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestFileName], [createTestFile], [deleteTestFile],
// [deleteTestDir], [closeTestFileSystem], size, close
// private void test10_size(Path path, long expected, boolean mustFail) throws Exception {
//
// long result = -1;
//
// try {
// result = files.size(path);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test10_size", e);
// }
//
// if (mustFail) {
// throwExpected("test10_size");
// }
//
// if (result != expected) {
// throwWrong("test10_size", "" + expected, "" + result);
// }
// }
//
// @org.junit.Test
// public void test10_size() throws Exception {
//
//
//
// // test with null parameter
// test10_size(null, -1, true);
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test10_size");
//
// // test with non existing file
// Path file1 = createNewTestFileName(testDir);
// test10_size(file1, -1, true);
//
// // test with existing empty file
// Path file2 = createTestFile(testDir, new byte[0]);
// test10_size(file2, 0, false);
// deleteTestFile(file2);
//
// // test with existing filled file
// Path file3 = createTestFile(testDir, new byte[13]);
// test10_size(file3, 13, false);
// deleteTestFile(file3);
//
// // test with dir
// Path dir0 = createTestDir(testDir);
// test10_size(dir0, 0, false);
// deleteTestDir(dir0);
// deleteTestDir(testDir);
//
// // test with closed filesystem
// if (config.supportsClose()) {
// config.closeTestFileSystem(files, fs);
// test10_size(file1, 0, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newDirectoryStream
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestDirName], [createTestFile], newDirectoryStream,
// [deleteTestDir], , [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
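// Streams the directory and checks that exactly the expected paths are returned: every returned
// path must be in 'expected', and every path in 'expected' must be returned exactly once.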
private void test11_newDirectoryStream(Path root, Set<Path> expected, boolean mustFail) throws Exception {
Set<Path> tmp = new HashSet<Path>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<Path> in = null;
try {
in = files.newDirectoryStream(root);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test11_newDirectoryStream", e);
}
if (mustFail) {
close(in);
throwExpected("test11_newDirectoryStream");
}
for (Path p : in) {
if (tmp.contains(p)) {
tmp.remove(p);
} else {
close(in);
throwUnexpectedElement("test11_newDirectoryStream", p);
}
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test11_newDirectoryStream", tmp);
}
}
@org.junit.Test
public void test11_newDirectoryStream() throws Exception {
// test with null
test11_newDirectoryStream(null, null, true);
prepareTestDir("test11_newDirectoryStream");
// test with empty dir
test11_newDirectoryStream(testDir, null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test11_newDirectoryStream(dir0, null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test11_newDirectoryStream(file0, null, true);
// test with non-empty dir
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
tmp.add(file3);
test11_newDirectoryStream(testDir, tmp, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
tmp.add(dir1);
test11_newDirectoryStream(testDir, tmp, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test11_newDirectoryStream(testDir, null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newDirectoryStream with filter
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// directoryStreams.Filter null / filter returns all / filter returns none / filter selects one.
// Total combinations : 7 + 8
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
public void test12_newDirectoryStream(Path root, DirectoryStream.Filter filter, Set<Path> expected, boolean mustFail)
throws Exception {
Set<Path> tmp = new HashSet<Path>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<Path> in = null;
try {
in = files.newDirectoryStream(root, filter);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test12_newDirectoryStream_with_filter", e);
}
if (mustFail) {
close(in);
throwExpected("test12_newDirectoryStream_with_filter");
}
Iterator<Path> itt = in.iterator();
while (itt.hasNext()) {
Path p = itt.next();
if (p == null) {
throwUnexpectedElement("test12_newDirectoryStream_with_filter", null);
}
if (tmp.contains(p)) {
tmp.remove(p);
} else {
close(in);
throwUnexpectedElement("test12_newDirectoryStream_with_filter", p.toString());
}
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test12_newDirectoryStream_with_filter", tmp);
}
// close(in); // double close should result in exception
}
@org.junit.Test
public void test12_newDirectoryStream_with_filter() throws Exception {
// test with null
test12_newDirectoryStream(null, null, null, true);
prepareTestDir("test12_newDirectoryStream_with_filter");
// test with empty dir + null filter
test12_newDirectoryStream(testDir, null, null, true);
// test with empty dir + true filter
test12_newDirectoryStream(testDir, new AllTrue(), null, false);
// test with empty dir + false filter
test12_newDirectoryStream(testDir, new AllFalse(), null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test12_newDirectoryStream(dir0, new AllTrue(), null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test12_newDirectoryStream(file0, new AllTrue(), null, true);
// test with non-empty dir and allTrue
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
tmp.add(file3);
test12_newDirectoryStream(testDir, new AllTrue(), tmp, false);
// test with non-empty dir and allFalse
test12_newDirectoryStream(testDir, new AllFalse(), null, false);
tmp.remove(file3);
// test with non-empty dir and select
test12_newDirectoryStream(testDir, new Select(tmp), tmp, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
test12_newDirectoryStream(testDir, new Select(tmp), tmp, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test12_newDirectoryStream(testDir, new AllTrue(), null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: getAttributes
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / existing link (!)
// closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test13_getAttributes(Path path, boolean isDirectory, long size, long currentTime, boolean mustFail)
throws Exception {
FileAttributes result = null;
try {
result = files.getAttributes(path);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test13_getFileAttributes", e);
}
if (mustFail) {
throwExpected("test13_getFileAttributes");
}
if (result.isDirectory() && !isDirectory) {
throwWrong("test13_getFileAttributes", "<not directory>", "<directory>");
}
if (size >= 0 && result.size() != size) {
throwWrong("test13_getFileAttributes", "size=" + size, "size=" + result.size());
}
if (!isWithinMargin(currentTime, result.lastModifiedTime())) {
throwWrong("test13_getFileAttributes", "lastModifiedTime=" + currentTime,
"lastModifiedTime=" + result.lastModifiedTime());
}
if (!isWithinMargin(currentTime, result.creationTime())) {
throwWrong("test13_getFileAttributes", "creationTime=" + currentTime, "creationTime=" + result.creationTime());
}
if (!isWithinMargin(currentTime, result.lastAccessTime())) {
throwWrong("test13_getFileAttributes", "lastAccessTime=" + currentTime, "lastAccessTime=" + result.lastAccessTime());
}
System.err.println("File " + path + " has attributes: " + result.isReadable() + " " + result.isWritable() + " "
+ result.isExecutable() + " " + result.isSymbolicLink() + " " + result.isDirectory() + " "
+ result.isRegularFile() + " " + result.isHidden() + " " + result.isOther() + " " + result.lastAccessTime() + " "
+ result.lastModifiedTime());
}
/**
* Tests whether two times (in milliseconds) are within a mild margin of one another. The margin is large enough to be able to
* cope with servers in other timezones and similar, expected, sources of discrepancy between times.
*
* @param time1
* @param time2
* @return
*/
private boolean isWithinMargin(long time1, long time2) {
final int millisecondsPerSecond = 1000;
final int secondsPerHour = 3600;
final long margin = 30 * secondsPerHour * millisecondsPerSecond;
return Math.abs(time1 - time2) < margin;
}
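// For example: 30 * 3600 * 1000 ms = 108,000,000 ms, i.e. a 30 hour margin, which comfortably
// covers a server clock in any timezone plus a reasonable amount of clock skew.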
@org.junit.Test
public void test13_getAttributes() throws Exception {
long currentTime = System.currentTimeMillis();
// test with null
test13_getAttributes(null, false, -1, currentTime, true);
prepareTestDir("test13_getAttributes");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test13_getAttributes(file0, false, -1, currentTime, true);
// test with existing empty file
Path file1 = createTestFile(testDir, null);
test13_getAttributes(file1, false, 0, currentTime, false);
// test with existing non-empty file
Path file2 = createTestFile(testDir, new byte[] { 1, 2, 3 });
test13_getAttributes(file2, false, 3, currentTime, false);
// test with existing dir
Path dir0 = createTestDir(testDir);
test13_getAttributes(dir0, true, -1, currentTime, false);
// TODO: test with link!
deleteTestDir(dir0);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test13_getAttributes(testDir, false, -1, currentTime, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: setPosixFilePermissions
//
// Possible parameters:
//
// Path null / non-existing file / existing file / existing dir / existing link (!) / closed filesystem
// Set<PosixFilePermission> null / empty set / [various correct set]
//
// Total combinations : N
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test14_setPosixFilePermissions(Path path, Set<PosixFilePermission> permissions, boolean mustFail)
throws Exception {
try {
files.setPosixFilePermissions(path, permissions);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test14_setPosixFilePermissions", e);
}
if (mustFail) {
throwExpected("test14_setPosixFilePermissions");
}
// Check result
FileAttributes attributes = files.getAttributes(path);
Set<PosixFilePermission> tmp = attributes.permissions();
if (!permissions.equals(tmp)) {
throwWrong("test14_setPosixFilePermissions", permissions, tmp);
}
}
@org.junit.Test
public void test14_setPosixFilePermissions() throws Exception {
if (!config.supportsPosixPermissions()) {
return;
}
// test with null, null
test14_setPosixFilePermissions(null, null, true);
prepareTestDir("test14_setPosixFilePermissions");
// test with existing file, null set
Path file0 = createTestFile(testDir, null);
test14_setPosixFilePermissions(file0, null, true);
// test with existing file, empty set
Set<PosixFilePermission> permissions = new HashSet<PosixFilePermission>();
test14_setPosixFilePermissions(file0, permissions, false);
// test with existing file, non-empty set
permissions.add(PosixFilePermission.OWNER_EXECUTE);
permissions.add(PosixFilePermission.OWNER_READ);
permissions.add(PosixFilePermission.OWNER_WRITE);
test14_setPosixFilePermissions(file0, permissions, false);
permissions.add(PosixFilePermission.OTHERS_READ);
test14_setPosixFilePermissions(file0, permissions, false);
permissions.add(PosixFilePermission.GROUP_READ);
test14_setPosixFilePermissions(file0, permissions, false);
// test with non-existing file
Path file1 = createNewTestFileName(testDir);
test14_setPosixFilePermissions(file1, permissions, true);
// test with existing dir
Path dir0 = createTestDir(testDir);
permissions.add(PosixFilePermission.OWNER_EXECUTE);
permissions.add(PosixFilePermission.OWNER_READ);
permissions.add(PosixFilePermission.OWNER_WRITE);
test14_setPosixFilePermissions(dir0, permissions, false);
permissions.add(PosixFilePermission.OTHERS_READ);
test14_setPosixFilePermissions(dir0, permissions, false);
permissions.add(PosixFilePermission.GROUP_READ);
test14_setPosixFilePermissions(dir0, permissions, false);
deleteTestDir(dir0);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test14_setPosixFilePermissions(file0, permissions, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newAttributesDirectoryStream
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// Total combinations : 7
//
// Depends on: [getTestFileSystem], [createTestDir], [createNewTestDirName], [createTestFile], newDirectoryStream,
// [deleteTestDir], , [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test15_newAttributesDirectoryStream(Path root, Set<PathAttributesPair> expected, boolean mustFail)
throws Exception {
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<PathAttributesPair> in = null;
try {
in = files.newAttributesDirectoryStream(root);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test15_newAttributesDirectoryStream", e);
}
if (mustFail) {
close(in);
throwExpected("test15_newAttributesDirectoryStream");
}
System.err.println("Comparing PathAttributesPairs:");
for (PathAttributesPair p : in) {
System.err.println("Got input " + p.path() + " " + p.attributes());
PathAttributesPair found = null;
for (PathAttributesPair x : tmp) {
System.err.println(" Comparing to " + x.path() + " " + x.attributes());
if (x.path().equals(p.path()) && x.attributes().equals(p.attributes())) {
System.err.println("Found!");
found = x;
break;
}
}
System.err.println(" Found = " + found);
if (found != null) {
tmp.remove(found);
} else {
System.err.println("NOT Found!");
close(in);
throwUnexpectedElement("test15_newAttributesDirectoryStream", p.path());
}
// if (tmp.contains(p)) {
// System.err.println("Found!");
// tmp.remove(p);
// } else {
// System.err.println("NOT Found!");
//
// close(in);
// throwUnexpectedElement("newAttributesDirectoryStream", p.path().getPath());
// }
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test15_newAttributesDirectoryStream", tmp);
}
}
@org.junit.Test
public void test15_newAttributesDirectoryStream() throws Exception {
// test with null
test15_newAttributesDirectoryStream(null, null, true);
prepareTestDir("test15_newAttrributesDirectoryStream");
// test with empty dir
test15_newAttributesDirectoryStream(testDir, null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test15_newAttributesDirectoryStream(dir0, null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test15_newAttributesDirectoryStream(file0, null, true);
// test with non-empty dir
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<PathAttributesPair> result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
result.add(new PathAttributesPairImplementation(file3, files.getAttributes(file3)));
test15_newAttributesDirectoryStream(testDir, result, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
result.add(new PathAttributesPairImplementation(dir1, files.getAttributes(dir1)));
test15_newAttributesDirectoryStream(testDir, result, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test15_newAttributesDirectoryStream(testDir, null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newAttributesDirectoryStream with filter
//
// Possible parameters:
//
// Path null / non-existing dir / existing empty dir / existing non-empty dir / existing dir with subdirs /
// existing file / closed filesystem
//
// directoryStreams.Filter null / filter returns all / filter returns none / filter selects one.
// Total combinations : 7 + 8
//
// Depends on: [getTestFileSystem], FileSystem.getEntryPath(), [createNewTestDirName], createDirectories,
// [deleteTestDir], [createTestFile], [deleteTestFile], [deleteTestDir], [closeTestFileSystem]
private void test16_newAttributesDirectoryStream(Path root, DirectoryStream.Filter filter, Set<PathAttributesPair> expected,
boolean mustFail) throws Exception {
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
if (expected != null) {
tmp.addAll(expected);
}
DirectoryStream<PathAttributesPair> in = null;
try {
in = files.newAttributesDirectoryStream(root, filter);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test16_newAttributesDirectoryDirectoryStream_with_filter", e);
}
if (mustFail) {
close(in);
throwExpected("test16_newAttributesDirectoryDirectoryStream_with_filter");
}
for (PathAttributesPair p : in) {
System.err.println("Got input " + p.path() + " " + p.attributes());
PathAttributesPair found = null;
for (PathAttributesPair x : tmp) {
System.err.println(" Comparing to " + x.path() + " " + x.attributes());
if (x.path().equals(p.path()) && x.attributes().equals(p.attributes())) {
System.err.println("Found!");
found = x;
break;
}
}
System.err.println(" Found = " + found);
if (found != null) {
tmp.remove(found);
} else {
System.err.println("NOT Found!");
close(in);
throwUnexpectedElement("test16_newAttributesDirectoryStream_with_filter", p.path());
}
// if (tmp.contains(p)) {
// System.err.println("Found!");
// tmp.remove(p);
// } else {
// System.err.println("NOT Found!");
//
// close(in);
// throwUnexpectedElement("newAttributesDirectoryStream", p.path().getPath());
// }
}
close(in);
if (tmp.size() > 0) {
throwMissingElements("test16_newAttributesDirectoryDirectoryStream_with_filter", tmp);
}
}
@org.junit.Test
public void test15_newAttributesDirectoryStream_with_filter() throws Exception {
// test with null
test16_newAttributesDirectoryStream(null, null, null, true);
prepareTestDir("test15_newAttributesDirectoryStream_with_filter");
// test with empty dir + null filter
test16_newAttributesDirectoryStream(testDir, null, null, true);
// test with empty dir + true filter
test16_newAttributesDirectoryStream(testDir, new AllTrue(), null, false);
// test with empty dir + false filter
test16_newAttributesDirectoryStream(testDir, new AllFalse(), null, false);
// test with non-existing dir
Path dir0 = createNewTestDirName(testDir);
test16_newAttributesDirectoryStream(dir0, new AllTrue(), null, true);
// test with existing file
Path file0 = createTestFile(testDir, null);
test16_newAttributesDirectoryStream(file0, new AllTrue(), null, true);
// test with non-empty dir and allTrue
Path file1 = createTestFile(testDir, null);
Path file2 = createTestFile(testDir, null);
Path file3 = createTestFile(testDir, null);
Set<PathAttributesPair> result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
result.add(new PathAttributesPairImplementation(file3, files.getAttributes(file3)));
test16_newAttributesDirectoryStream(testDir, new AllTrue(), result, false);
// test with non-empty dir and allFalse
test16_newAttributesDirectoryStream(testDir, new AllFalse(), null, false);
// test with subdirs
Path dir1 = createTestDir(testDir);
Path file4 = createTestFile(dir1, null);
result.add(new PathAttributesPairImplementation(dir1, files.getAttributes(dir1)));
test16_newAttributesDirectoryStream(testDir, new AllTrue(), result, false);
// test with non-empty dir and select
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(file2);
result = new HashSet<PathAttributesPair>();
result.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
result.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
result.add(new PathAttributesPairImplementation(file2, files.getAttributes(file2)));
test16_newAttributesDirectoryStream(testDir, new Select(tmp), result, false);
deleteTestFile(file4);
deleteTestDir(dir1);
deleteTestFile(file3);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test16_newAttributesDirectoryStream(testDir, new AllTrue(), null, true);
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newInputStream
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
//
// Total combinations : 6
//
// Depends on:
private void test20_newInputStream(Path file, byte[] expected, boolean mustFail) throws Exception {
InputStream in = null;
try {
in = files.newInputStream(file);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test20_newInputStream", e);
}
if (mustFail) {
close(in);
throwExpected("test20_newInputStream");
}
byte[] data = readFully(in);
if (expected == null) {
if (data.length != 0) {
throwWrong("test20_newInputStream", "zero bytes", data.length + " bytes");
}
return;
}
if (expected.length != data.length) {
throwWrong("test20_newInputStream", expected.length + " bytes", data.length + " bytes");
}
if (!Arrays.equals(expected, data)) {
throwWrong("test20_newInputStream", Arrays.toString(expected), Arrays.toString(data));
}
}
@org.junit.Test
public void test20_newInputStream() throws Exception {
byte[] data = "Hello World".getBytes();
// test with null
test20_newInputStream(null, null, true);
prepareTestDir("test20_newInputStream");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test20_newInputStream(file0, null, true);
// test with existing empty file
Path file1 = createTestFile(testDir, null);
test20_newInputStream(file1, null, false);
// test with existing non-empty file
Path file2 = createTestFile(testDir, data);
test20_newInputStream(file2, data, false);
// test with existing dir
Path dir0 = createTestDir(testDir);
test20_newInputStream(dir0, null, true);
// cleanup
deleteTestFile(file1);
deleteTestFile(file2);
deleteTestDir(dir0);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test20_newInputStream(file2, data, true);
}
}
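// The content checks above rely on a readFully(InputStream) helper defined elsewhere in this class
// and not shown in this excerpt. A minimal sketch of what such a helper is assumed to do is given
// below (hypothetical name, kept commented out so it does not clash with the real implementation):
//
// private byte[] readFullySketch(InputStream in) throws Exception {
//     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
//     byte[] chunk = new byte[4096];
//     int read;
//     while ((read = in.read(chunk)) != -1) {
//         buffer.write(chunk, 0, read); // accumulate until end-of-stream
//     }
//     in.close();
//     return buffer.toByteArray();
// }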
@org.junit.Test
public void test20b_newInputStreamDoubleClose() throws Exception {
// See what happens when we close an input stream twice and then reopen the stream. This failed
// on the SSH adaptor due to a bug in the sftp channel cache.
byte[] data = "Hello World".getBytes();
prepareTestDir("test20b_newInputStreamDoubleClose");
Path file = createTestFile(testDir, data);
InputStream in = null;
try {
in = files.newInputStream(file);
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
// should not fail
in.close();
} catch (Exception e) {
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
in.close();
} catch (Exception e) {
// should fail
}
try {
in = files.newInputStream(file);
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
try {
in.close();
} catch (Exception e) {
// should not fail
throwUnexpected("test20b_newInputStreamDoubleClose", e);
}
deleteTestFile(file);
deleteTestDir(testDir);
}
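// Note: the double-close scenario above guards against adaptor-level stream caching bugs. Client
// code built on this API could avoid accidental double closes with try-with-resources instead,
// for example (illustrative sketch only; someFile is a placeholder path):
//
// try (InputStream in2 = files.newInputStream(someFile)) {
//     while (in2.read() != -1) {
//         // consume the stream
//     }
// } // close() is invoked exactly once here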
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newOutputStream
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// OpenOption null / CREATE / OPEN / OPEN_OR_CREATE / APPEND / TRUNCATE / READ / WRITE + combinations
//
// Total combinations : N
//
// Depends on:
private void test21_newOutputStream(Path path, OpenOption[] options, byte[] data, byte[] expected, boolean mustFail)
throws Exception {
OutputStream out = null;
try {
out = files.newOutputStream(path, options);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test21_newOutputStream", e);
}
if (mustFail) {
close(out);
throwExpected("test21_newOutputStream");
}
out.write(data);
close(out);
InputStream in = files.newInputStream(path);
byte[] tmp = readFully(in);
if (expected == null) {
if (tmp.length != 0) {
throwWrong("test21_newOutputStream", "zero bytes", tmp.length + " bytes");
}
return;
}
if (expected.length != tmp.length) {
throwWrong("test21_newOutputStream", expected.length + " bytes", tmp.length + " bytes");
}
if (!Arrays.equals(expected, tmp)) {
throwWrong("test21_newOutputStream", Arrays.toString(expected), Arrays.toString(tmp));
}
}
@org.junit.Test
public void test21_newOutputStream() throws Exception {
byte[] data = "Hello World".getBytes();
byte[] data2 = "Hello WorldHello World".getBytes();
// test with null
test21_newOutputStream(null, null, null, null, true);
prepareTestDir("test21_newOuputStream");
// test with existing file and null options
Path file0 = createTestFile(testDir, null);
test21_newOutputStream(file0, null, null, null, true);
// test with existing file and empty options
test21_newOutputStream(file0, new OpenOption[0], null, null, true);
// test with existing file and CREATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE }, null, null, true);
// test with existing file and OPEN option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN }, null, null, true);
// test with existing file and OPEN_OR_CREATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE }, null, null, true);
// test with existing file and APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.APPEND }, null, null, true);
// test with existing file and TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.TRUNCATE }, null, null, true);
// test with existing file and READ option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.READ }, null, null, true);
// test with existing file and WRITE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.WRITE }, null, null, true);
// test with existing file and CREATE + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
// test with existing file and CREATE + APPEND + READ option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND, OpenOption.READ }, null, null,
true);
// test with existing file and OPEN_OR_CREATE + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, data, data, false);
// test with existing file and OPEN + APPEND option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, data, data2, false);
// test with existing file and OPEN + TRUNCATE + WRITE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.WRITE }, data, data,
false);
// test with existing file and CREATE + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.CREATE, OpenOption.TRUNCATE }, null, null, true);
// test with existing file and OPEN_OR_CREATE + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.TRUNCATE }, data, data, false);
// test with existing file and OPEN + TRUNCATE option
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE }, data, data, false);
deleteTestFile(file0);
// test with non-existing and CREATE + APPEND option
Path file1 = createNewTestFileName(testDir);
test21_newOutputStream(file1, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file1);
// test with non-existing and OPEN_OR_CREATE + APPEND option
Path file2 = createNewTestFileName(testDir);
test21_newOutputStream(file2, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file2);
// test with non-existing and OPEN + APPEND option
Path file3 = createNewTestFileName(testDir);
test21_newOutputStream(file3, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, null, null, true);
// test with existing dir
Path dir0 = createTestDir(testDir);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, null, null, true);
test21_newOutputStream(dir0, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND }, null, null, true);
deleteTestDir(dir0);
// test with conflicting options
Path file4 = createTestFile(testDir, null);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.CREATE, OpenOption.OPEN, OpenOption.APPEND }, null, null,
true);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.OPEN, OpenOption.TRUNCATE, OpenOption.APPEND }, null, null,
true);
test21_newOutputStream(file4, new OpenOption[] { OpenOption.OPEN, OpenOption.APPEND, OpenOption.READ }, null, null, true);
deleteTestFile(file4);
// test with non-existing file and CREATE + APPEND option
Path file5 = createNewTestFileName(testDir);
test21_newOutputStream(file5, new OpenOption[] { OpenOption.CREATE, OpenOption.APPEND }, data, data, false);
deleteTestFile(file5);
deleteTestDir(testDir);
// Close test fs
closeTestFS();
if (config.supportsClose()) {
// test with closed fs
test21_newOutputStream(file0, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND }, null, null, true);
}
}
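// Taken together, the expectations above encode (for this test suite, not as an authoritative
// specification): CREATE requires the file to be absent, OPEN requires it to exist, OPEN_OR_CREATE
// accepts both, every output stream must also specify APPEND or TRUNCATE, READ is rejected, and
// WRITE is only accepted as a redundant flag. The typical "append, creating if necessary" pattern
// exercised above is (somePath is a placeholder):
//
// OutputStream outSketch = files.newOutputStream(somePath, new OpenOption[] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND });
// outSketch.write("Hello World".getBytes());
// outSketch.close();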
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: newByteChannel
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// OpenOption null / CREATE / OPEN / OPEN_OR_CREATE / APPEND / TRUNCATE / READ / WRITE + combinations
//
// Total combinations : N
//
// Depends on:
// public void test22_newByteChannel(Path path, OpenOption [] options, byte [] toWrite, byte [] toRead,
// boolean mustFail) throws Exception {
//
// if (!config.supportsNewByteChannel()) {
// return;
// }
//
// SeekableByteChannel channel = null;
//
// try {
// channel = files.newByteChannel(path, options);
// } catch (Exception e) {
//
// if (mustFail) {
// // expected
// return;
// }
//
// throwUnexpected("test22_newByteChannel", e);
// }
//
// if (mustFail) {
// close(channel);
// throwExpected("test22_newByteChannel");
// }
//
// if (toWrite != null) {
// channel.write(ByteBuffer.wrap(toWrite));
// }
//
// if (toRead != null) {
//
// channel.position(0);
//
// byte [] tmp = readFully(channel);
//
// if (toRead.length != tmp.length) {
// throwWrong("test22_newByteChannel", toRead.length + " bytes", tmp.length + " bytes");
// }
//
// if (!Arrays.equals(toRead, tmp)) {
// throwWrong("test22_newByteChannel", Arrays.toString(toRead), Arrays.toString(tmp));
// }
// }
//
// close(channel);
// }
// @org.junit.Test
// public void test22_newByteChannel() throws Exception {
//
// if (!config.supportsNewByteChannel()) {
// return;
// }
//
// byte [] data = "Hello World".getBytes();
// byte [] data2 = "Hello WorldHello World".getBytes();
//
//
//
// // test with null
// test22_newByteChannel(null, null, null, null, true);
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
// prepareTestDir(fs, "test22_newByteChannel");
//
// // test with existing file and null options
// Path file0 = createTestFile(testDir, null);
// test22_newByteChannel(file0, null, null, null, true);
//
// // test with existing file and empty options
// test22_newByteChannel(file0, new OpenOption[0], null, null, true);
//
// // test with existing file and CREATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.CREATE }, null, null, true);
//
// // test with existing file and OPEN option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN }, null, null, true);
//
// // test with existing file and OPEN_OR_CREATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN_OR_CREATE }, null, null, true);
//
// // test with existing file and APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.APPEND }, null, null, true);
//
// // test with existing file and TRUNCATE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.TRUNCATE }, null, null, true);
//
// // test with existing file and READ option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.READ }, null, null, true);
//
// // test with existing file and WRITE option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.WRITE }, null, null, true);
//
// // test with existing file and CREATE + APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.CREATE, OpenOption.APPEND }, null, null, true);
//
// // test with existing file and OPEN + READ + APPEND option
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN, OpenOption.READ, OpenOption.APPEND }, null, null, true);
//
// // test with existing file and OPEN + READ option
// Path file1 = createTestFile(testDir, data);
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, data, false);
//
// // Test with existing file and OPEN + APPEND + READ + WRITE
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.WRITE, OpenOption.READ }, data, data, false);
//
// // Test with existing file and OPEN + APPEND + READ + WRITE
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.APPEND, OpenOption.WRITE, OpenOption.READ }, null, null, true);
//
// // test with existing file and OPEN + WRITE without APPEND option
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.OPEN, OpenOption.WRITE }, null, null, true);
//
// // test with existing file and CREATE + WRITE + APPEND
// test22_newByteChannel(file1, new OpenOption [] { OpenOption.CREATE, OpenOption.WRITE, OpenOption.APPEND }, null, null, true);
//
// deleteTestFile(file1);
//
// // test with non-existing file and CREATE + WRITE + APPEND
// Path file2 = createNewTestFileName(testDir);
// test22_newByteChannel(file2, new OpenOption [] { OpenOption.CREATE, OpenOption.WRITE, OpenOption.APPEND }, data, null, false);
// test22_newByteChannel(file2, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, data, false);
// deleteTestFile(file2);
//
// // test with non-existing file and OPEN + READ
// Path file3 = createNewTestFileName(testDir);
// test22_newByteChannel(file3, new OpenOption [] { OpenOption.OPEN, OpenOption.READ }, null, null, true);
//
// // test with non-existing file and OPEN_OR_CREATE + WRITE + READ + APPEND
// Path file4 = createNewTestFileName(testDir);
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.WRITE, OpenOption.READ }, data, data, false);
//
// // test with existing file and OPEN_OR_CREATE + WRITE + READ + APPEND
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.WRITE, OpenOption.APPEND }, data,
// null, false);
// test22_newByteChannel(file4, new OpenOption [] { OpenOption.OPEN, OpenOption.READ, }, null, data2, false);
//
// deleteTestFile(file0);
// deleteTestFile(file4);
//
// deleteTestDir(testDir);
//
// if (config.supportsClose()) {
// // test with closed fs
// config.closeTestFileSystem(files,fs);
// test22_newByteChannel(file0, new OpenOption [] { OpenOption.OPEN_OR_CREATE, OpenOption.APPEND, OpenOption.READ },
// null, null, true);
// }
//
//
// }
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: copy (synchronous)
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// CopyOptions null / CREATE / REPLACE / IGNORE / APPEND / RESUME / VERIFY / ASYNCHRONOUS
//
// Total combinations : N
//
// Depends on:
private void test23_copy(Path source, Path target, CopyOption[] options, byte[] expected, boolean mustFail) throws Exception {
Copy copy;
try {
copy = files.copy(source, target, options);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test23_copy", e);
}
if (mustFail) {
throwExpected("test23_copy");
}
if (expected != null) {
byte[] tmp = readFully(files.newInputStream(target));
if (!Arrays.equals(expected, tmp)) {
throwWrong("test23_copy", Arrays.toString(expected), Arrays.toString(tmp));
}
}
}
@org.junit.Test
public void test23_copy() throws Exception {
byte[] data = "Hello World!".getBytes();
byte[] data2 = "Goodbye World!".getBytes();
byte[] data3 = "Hello World!Goodbye World!".getBytes();
byte[] data4 = "Hello World!Hello World!".getBytes();
byte[] data5 = "Hello World!Hello World!Hello World!".getBytes();
// test with null
test23_copy(null, null, null, null, true);
prepareTestDir("test23_copy");
Path file0 = createTestFile(testDir, data);
// test without target
test23_copy(file0, null, new CopyOption[] { CopyOption.CREATE }, null, true);
// test without source
test23_copy(null, file0, new CopyOption[] { CopyOption.CREATE }, null, true);
Path file1 = createNewTestFileName(testDir);
Path file2 = createNewTestFileName(testDir);
Path file3 = createNewTestFileName(testDir);
Path file4 = createTestFile(testDir, data2);
Path file5 = createTestFile(testDir, data3);
Path dir0 = createTestDir(testDir);
Path dir1 = createNewTestDirName(testDir);
Path file6 = createNewTestFileName(dir1);
// test copy with non-existing source
test23_copy(file1, file2, new CopyOption[0], null, true);
// test copy with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy using conflicting options should fail
test23_copy(file0, file1, new CopyOption[] { CopyOption.IGNORE, CopyOption.CREATE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.IGNORE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.REPLACE }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.RESUME }, null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.APPEND }, null, true);
// test copy with non-existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE }, data, false);
test23_copy(file0, file2, new CopyOption[] { CopyOption.CREATE, CopyOption.CREATE }, data, false);
// test copy with non-existing target with non-existing parent
test23_copy(file0, file6, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy with existing target
test23_copy(file0, file1, new CopyOption[0], null, true);
test23_copy(file0, file1, new CopyOption[] { CopyOption.CREATE }, null, true);
// test copy with same target as source
test23_copy(file0, file0, new CopyOption[] { CopyOption.CREATE }, data, false);
// test ignore with existing target
test23_copy(file4, file1, new CopyOption[] { CopyOption.IGNORE }, data, false);
test23_copy(file4, file1, new CopyOption[] { CopyOption.IGNORE, CopyOption.IGNORE }, data, false);
// test resume with existing target
test23_copy(file4, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
test23_copy(file1, file5, new CopyOption[] { CopyOption.RESUME }, null, true);
test23_copy(file5, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, data3, false);
test23_copy(file5, file1, new CopyOption[] { CopyOption.RESUME }, data3, false);
test23_copy(file5, file2, new CopyOption[] { CopyOption.RESUME, CopyOption.RESUME }, data3, false);
test23_copy(file4, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with non-existing source
test23_copy(file3, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with non-existing target
test23_copy(file5, file3, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with dir target
test23_copy(file5, dir0, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, null, true);
// test resume with same source and target
test23_copy(file5, file5, new CopyOption[] { CopyOption.RESUME, CopyOption.VERIFY }, data3, false);
// test replace with existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE }, data, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.REPLACE }, data, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.VERIFY }, null, true);
// test append with existing target
test23_copy(file0, file1, new CopyOption[] { CopyOption.APPEND }, data4, false);
test23_copy(file0, file1, new CopyOption[] { CopyOption.APPEND, CopyOption.APPEND }, data5, false);
// test append with non-existing source
test23_copy(file3, file1, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with non-existing target
test23_copy(file0, file3, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with dir source
test23_copy(dir0, file1, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with dir target
test23_copy(file0, dir0, new CopyOption[] { CopyOption.APPEND }, null, true);
// test append with source equals target
test23_copy(file0, file0, new CopyOption[] { CopyOption.APPEND }, null, true);
// test with source equals target and a null option
test23_copy(file0, file0, new CopyOption[] { null }, null, true);
deleteTestDir(dir0);
deleteTestFile(file5);
deleteTestFile(file4);
deleteTestFile(file2);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
closeTestFS();
}
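// Per the expectations above (specific to this test, not an authoritative specification):
// CREATE requires a non-existing target, REPLACE overwrites an existing target, IGNORE silently
// keeps the existing target, APPEND concatenates the source onto the target, and RESUME copies the
// missing tail when the target already holds a prefix of the source (optionally checked with VERIFY).
// A minimal "copy only if absent" sketch built from calls already used in this test, for some
// source/target Paths:
//
// if (!files.exists(target)) {
//     files.copy(source, target, new CopyOption[] { CopyOption.CREATE });
// }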
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: copy (asynchronous)
//
// Possible parameters:
//
// Path null / non-existing file / existing empty file / existing non-empty file / existing dir / closed filesystem
// CopyOptions null / CREATE / REPLACE / IGNORE / APPEND / RESUME / VERIFY / ASYNCHRONOUS
//
// Total combinations : N
//
// Depends on:
@org.junit.Test
public void test24_copy_async() throws Exception {
byte[] data = "Hello World!".getBytes();
prepareTestDir("test24_copy_async");
Path file0 = createTestFile(testDir, data);
Path file1 = createNewTestFileName(testDir);
// Test the async copy
Copy copy = files.copy(file0, file1, new CopyOption[] { CopyOption.CREATE, CopyOption.ASYNCHRONOUS });
CopyStatus status = files.getCopyStatus(copy);
while (!status.isDone()) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// ignored
}
status = files.getCopyStatus(copy);
}
// Test the cancel
copy = files.copy(file0, file1, new CopyOption[] { CopyOption.REPLACE, CopyOption.ASYNCHRONOUS });
status = files.cancelCopy(copy);
deleteTestFile(file1);
deleteTestFile(file0);
deleteTestDir(testDir);
}
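// The polling loop above waits indefinitely for the asynchronous copy to finish. A bounded variant
// (illustrative sketch only, reusing the same files.getCopyStatus(...) call) could look like this:
//
// long deadline = System.currentTimeMillis() + 60 * 1000; // wait at most one minute
// CopyStatus pollStatus = files.getCopyStatus(copy);
// while (!pollStatus.isDone() && System.currentTimeMillis() < deadline) {
//     Thread.sleep(1000);
//     pollStatus = files.getCopyStatus(copy);
// }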
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: getLocalCWD / getLocalHomeFileSystem
//
// Possible parameters:
//
// (none) -- only verifies that the local working directory and the local home file system can be
// retrieved when the adaptor supports them
//
// Total combinations : 2
//
// Depends on:
@org.junit.Test
public void test25_getLocalCWD() throws Exception {
if (config.supportsLocalCWD()) {
try {
Utils.getLocalCWD(files);
} catch (Exception e) {
throwUnexpected("test25_getLocalCWD", e);
}
}
}
@org.junit.Test
public void test26_getLocalHomeFileSystem() throws Exception {
if (config.supportsLocalHome()) {
try {
Utils.getLocalHome(files);
} catch (Exception e) {
throwUnexpected("test26_getLocalHomeFileSystem", e);
}
}
}
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: move
//
// Possible parameters:
//
// source null / non-existing file / existing file / existing dir
// target null / non-existing file / existing file / non-existing parent dir / existing dir
// + closed filesystem
//
// Total combinations :
//
// Depends on:
private void test27_move(Path source, Path target, boolean mustFail) throws Exception {
try {
files.move(source, target);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test27_move", e);
}
if (mustFail) {
throwExpected("test27_move");
}
RelativePath sourceName = source.getRelativePath().normalize();
RelativePath targetName = target.getRelativePath().normalize();
if (sourceName.equals(targetName)) {
// source == target, so the move did nothing.
return;
}
// make sure the source no longer exists, and the target does exist
if (files.exists(source)) {
throwWrong("test27_move", "no source file", "source file");
}
if (!files.exists(target)) {
throwWrong("test27_move", "target file", "no target file");
}
}
@org.junit.Test
public void test27_move() throws Exception {
test27_move(null, null, true);
prepareTestDir("test27_move");
// test with non-existing source
Path file0 = createNewTestFileName(testDir);
Path file1 = createNewTestFileName(testDir);
test27_move(file0, file1, true);
// test with existing source, non-existing target
Path file2 = createTestFile(testDir, null);
test27_move(file2, file0, false);
// test with existing source and target
Path file3 = createTestFile(testDir, null);
test27_move(file3, file0, true);
// test file existing source, and target with non-existing parent
Path dir0 = createNewTestDirName(testDir);
Path file4 = createNewTestFileName(dir0);
test27_move(file0, file4, true);
// test with source equals target
test27_move(file0, file0, false);
deleteTestFile(file0);
deleteTestFile(file3);
// test with existing dir
Path dir1 = createTestDir(testDir);
test27_move(dir1, file1, false);
deleteTestDir(file1);
deleteTestDir(testDir);
closeTestFS();
}
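// test27 encodes the following move semantics: moving onto an existing target fails, moving a path
// onto itself is a no-op, and a successful move leaves only the target behind. A guarded rename
// sketch using only calls already shown above (source/target are placeholder Paths):
//
// if (!files.exists(target)) {
//     files.move(source, target);
// }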
// ---------------------------------------------------------------------------------------------------------------------------
// TEST: readSymbolicLink
//
// Possible parameters:
//
// link null / non-existing file / existing file / existing dir / existing link / broken link / closed filesystem
//
// Total combinations : 7
//
// Depends on:
private void test28_readSymbolicLink(Path link, Path expected, boolean mustFail) throws Exception {
Path target = null;
try {
target = files.readSymbolicLink(link);
} catch (Exception e) {
if (mustFail) {
// expected
return;
}
throwUnexpected("test28_readSymboliclink", e);
}
if (mustFail) {
throwExpected("test28_readSymbolicLink");
}
// make sure the target is what was expected
if (expected != null && !target.equals(expected)) {
throwWrong("test28_readSymbolicLink", expected, target);
}
}
@org.junit.Test
public void test28_readSymbolicLink() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
// test with null
test28_readSymbolicLink(null, null, true);
prepareTestDir("test28_readSybmolicLink");
// test with non-existing file
Path file0 = createNewTestFileName(testDir);
test28_readSymbolicLink(file0, null, true);
// test with existing file
Path file1 = createTestFile(testDir, null);
test28_readSymbolicLink(file1, null, true);
deleteTestFile(file1);
// test with existing dir
Path dir0 = createTestDir(testDir);
test28_readSymbolicLink(dir0, null, true);
deleteTestDir(dir0);
deleteTestDir(testDir);
closeTestFS();
}
@org.junit.Test
public void test29_readSymbolicLink() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir which is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
Path file2 = resolve(root, "file2"); // does not exist
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
// link0 should point to file0
test28_readSymbolicLink(link0, file0, false);
// link1 should point to file1
test28_readSymbolicLink(link1, file1, false);
// link2 should point to the non-existing file2; reading the link itself still succeeds
test28_readSymbolicLink(link2, file2, false);
// link3 should point to link0 which points to file0
test28_readSymbolicLink(link3, link0, false);
// link4 should point to link2 which points to file2
test28_readSymbolicLink(link4, link2, false);
// link5 should point to link6 which points to link5
test28_readSymbolicLink(link5, link6, false);
// link6 should point to link5 which points to link6
test28_readSymbolicLink(link6, link5, false);
}
// @org.junit.Test
// public void test30_isSymbolicLink() throws Exception {
//
//
//
// FileSystem fs = config.getTestFileSystem(files, credentials);
//
// // Use external test dir with is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
// Path root = fs.getEntryPath().resolve(new RelativePath("xenon_test/links"));
//
// if (!files.exists(root)) {
// throw new Exception("Cannot find symbolic link test dir at " + root.getPath());
// }
//
// // prepare the test files
// boolean v = files.isSymbolicLink(root.resolve(new RelativePath("file0")));
// assertFalse(v);
//
// v = files.isSymbolicLink(root.resolve(new RelativePath("link0")));
// assertTrue(v);
//
// v = files.isSymbolicLink(root.resolve(new RelativePath("file2")));
// assertFalse(v);
//
//
// }
@org.junit.Test
public void test31_newDirectoryStreamWithBrokenLinks() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir which is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
Set<Path> tmp = new HashSet<Path>();
tmp.add(file0);
tmp.add(file1);
tmp.add(link0);
tmp.add(link1);
tmp.add(link2);
tmp.add(link3);
tmp.add(link4);
tmp.add(link5);
tmp.add(link6);
test11_newDirectoryStream(root, tmp, false);
}
@org.junit.Test
public void test32_newAttributesDirectoryStreamWithBrokenLinks() throws Exception {
if (!config.supportsSymboliclinks()) {
return;
}
Path cwd = config.getWorkingDir(files, credentials);
// Use external test dir which is assumed to be in fs.getEntryPath().resolve("xenon_test/links");
Path root = resolve(cwd, "xenon_test/links");
if (!files.exists(root)) {
throw new Exception("Cannot find symbolic link test dir at " + root);
}
// prepare the test files
Path file0 = resolve(root, "file0"); // exists
Path file1 = resolve(root, "file1"); // exists
// prepare the test links
Path link0 = resolve(root, "link0"); // points to file0 (contains text)
Path link1 = resolve(root, "link1"); // points to file1 (is empty)
Path link2 = resolve(root, "link2"); // points to non-existing file2
Path link3 = resolve(root, "link3"); // points to link0 which points to file0 (contains text)
Path link4 = resolve(root, "link4"); // points to link2 which points to non-existing file2
Path link5 = resolve(root, "link5"); // points to link6 (circular)
Path link6 = resolve(root, "link6"); // points to link5 (circular)
Set<PathAttributesPair> tmp = new HashSet<PathAttributesPair>();
tmp.add(new PathAttributesPairImplementation(file0, files.getAttributes(file0)));
tmp.add(new PathAttributesPairImplementation(file1, files.getAttributes(file1)));
tmp.add(new PathAttributesPairImplementation(link0, files.getAttributes(link0)));
tmp.add(new PathAttributesPairImplementation(link1, files.getAttributes(link1)));
tmp.add(new PathAttributesPairImplementation(link2, files.getAttributes(link2)));
tmp.add(new PathAttributesPairImplementation(link3, files.getAttributes(link3)));
tmp.add(new PathAttributesPairImplementation(link4, files.getAttributes(link4)));
tmp.add(new PathAttributesPairImplementation(link5, files.getAttributes(link5)));
tmp.add(new PathAttributesPairImplementation(link6, files.getAttributes(link6)));
test15_newAttributesDirectoryStream(root, tmp, false);
}
/*
public Path readSymbolicLink(Path link) throws XenonException;
public boolean isSymbolicLink(Path path) throws XenonException;
*/
@org.junit.Test
public void test33_multipleFileSystemsOpenSimultaneously() throws Exception {
// Open two file systems. They should both be open afterwards.
FileSystem fs0 = files.newFileSystem(config.getScheme(), config.getCorrectLocation(),
config.getDefaultCredential(credentials), null);
FileSystem fs1 = files.newFileSystem(config.getScheme(), config.getCorrectLocation(),
config.getDefaultCredential(credentials), null);
assert (files.isOpen(fs0));
assert (files.isOpen(fs1));
// Close them both. We should get no exceptions.
files.close(fs0);
files.close(fs1);
}
}
| refactored tests: split file isOpen() tests | test/src/nl/esciencecenter/xenon/adaptors/GenericFileAdaptorTestParent.java | refactored tests: split file isOpen() tests | <ide><path>est/src/nl/esciencecenter/xenon/adaptors/GenericFileAdaptorTestParent.java
<ide> }
<ide>
<ide> @org.junit.Test
<del> public void test00_newFileSystem_nullProperties_throwConditionally() throws Exception {
<add> public void test00_newFileSystem_nullProperties_throwIfApplicable() throws Exception {
<ide> // test with correct URI without credential and without properties
<ide> boolean allowNull = config.supportNullCredential();
<ide> test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), null, null, !allowNull);
<ide> if (!config.supportUserInUri()) {
<ide> return;
<ide> }
<add>
<ide> String uriWithUsername = config.getCorrectLocationWithUser();
<ide> test00_newFileSystem(config.getScheme(), uriWithUsername, null, null, false);
<ide> }
<ide> if (!config.supportUserInUri()) {
<ide> return;
<ide> }
<add>
<ide> String uriWithWrongUser = config.getCorrectLocationWithWrongUser();
<ide> test00_newFileSystem(config.getScheme(), uriWithWrongUser, null, null, true);
<ide> }
<ide>
<ide> @org.junit.Test
<ide> public void test00_newFileSystem_nonDefaultCredentialIfSupported_noThrow() throws Exception {
<add> if (!config.supportNonDefaultCredential()) {
<add> return;
<add> }
<add>
<ide> Credential nonDefaultCredential = config.getNonDefaultCredential(credentials);
<del> if (config.supportNonDefaultCredential()) {
<del> test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), nonDefaultCredential, null, false);
<del> }
<add> test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), nonDefaultCredential, null, false);
<ide> }
<ide>
<ide> @org.junit.Test
<ide>
<ide> @org.junit.Test
<ide> public void test00_newFileSystem_correctProperties_noThrow() throws Exception {
<del> if (config.supportsProperties()) {
<del> test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
<del> config.getCorrectProperties(), false);
<del> }
<add> if (!config.supportsProperties()) {
<add> return;
<add> }
<add>
<add> test00_newFileSystem(config.getScheme(), config.getCorrectLocation(), config.getDefaultCredential(credentials),
<add> config.getCorrectProperties(), false);
<ide> }
<ide>
<ide> // ---------------------------------------------------------------------------------------------------------------------------
<ide> }
<ide>
<ide> @org.junit.Test
<del> public void test01_isOpen() throws Exception {
<del>
<del> // test with null filesystem
<add> public void test01_isOpen_fsIsNull_throw() throws Exception {
<ide> test01_isOpen(null, false, true);
<del>
<add> }
<add>
<add> @org.junit.Test
<add> public void test01_isOpen_openFs_true() throws Exception {
<ide> FileSystem fs = config.getTestFileSystem(files, credentials);
<del>
<del> // test with correct open filesystem
<ide> test01_isOpen(fs, true, false);
<del>
<del> if (config.supportsClose()) {
<del> files.close(fs);
<del>
<del> // test with correct closed filesystem
<del> test01_isOpen(fs, false, false);
<del> }
<del>
<add> }
<add>
<add> @org.junit.Test
<add> public void test01_isOpen_closedFsIfSupported_false() throws Exception {
<add> if (!config.supportsClose()) {
<add> return;
<add> }
<add> FileSystem fs = config.getTestFileSystem(files, credentials);
<add> files.close(fs);
<add> test01_isOpen(fs, false, false);
<ide> }
<ide>
<ide> // --------------------------------------------------------------------------------------------------------------------------- |
|
Java | epl-1.0 | edd7af8fefc88fa9f50f0ebe39a9770c3d94f48e | 0 | trona85/GII-17.1B-UBULog-1.0 | /**
*
*/
package controllers;
import java.awt.Desktop;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URL;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.SnapshotParameters;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.control.MenuButton;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javafx.scene.web.WebEngine;
import javafx.scene.web.WebView;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Stage;
import model.Chart;
import model.EnrolledUser;
import model.Group;
import model.Log;
import model.Role;
import model.TableLog;
import parserdocument.CsvParser;
import ubulogexception.UBULogError;
import ubulogexception.UBULogException;
import webservice.CourseWS;
/**
* Controller class for the main window.
*
* @author Oscar Fernández Armengol
* @author Claudia Martínez Herrero
*
* @version 1.1
*/
public class MainController implements Initializable {
static final Logger logger = LoggerFactory.getLogger(MainController.class);
private String all = "Todos";
@FXML // Curso actual
public Label lblActualCourse;
@FXML // Usuario actual
public Label lblActualUser;
@FXML // Host actual
public Label lblActualHost;
@FXML // Numero de participantes
public Label lblCountParticipants;
@FXML // lista de participantes
public ListView<EnrolledUser> listParticipants;
ObservableList<EnrolledUser> enrList;
@FXML // lista de eventos
public ListView<model.Event> listEvents;
ObservableList<model.Event> eventList;
@FXML // Botón filtro por rol
public MenuButton slcRole;
MenuItem[] roleMenuItems;
String filterRole = all;
@FXML // Botón filtro por grupo
public MenuButton slcGroup;
MenuItem[] groupMenuItems;
String filterGroup = all;
@FXML // Botón selector gráfico
public MenuButton slcChart;
@FXML // Entrada de filtro de usuarios por patrón
public TextField tfdParticipants;
String patternParticipants = "";
@FXML // Button that generates the chart from the table
public Button btnchart;
@FXML // Entrada de filtro de actividades por patrón
public TextField tfdEvents;
String patternEvents = "";
@FXML // Date filter input for the log table
public TextField tfdDate;
String patternDate = "";
@FXML
public TextField tfdNameUser;
String patternNameUser = "";
@FXML
public TextField tfdUserAffected;
String patternUserAffected = "";
@FXML
public TextField tfdContext;
String patternContext = "";
@FXML
public TextField tfdComponent;
String patternComponent = "";
@FXML
public TextField tfdEvent;
String patternEvent = "";
@FXML
public TextField tfdDescription;
String patternDescription = "";
@FXML
public TextField tfdPOrigin;
String patternOrigin = "";
@FXML
public TextField tfdIp;
String patternIp = "";
@FXML // chart ,imagen log
private WebView chart;
private WebEngine engineChart;
@FXML // chart ,imagen log
private WebView tableLogs;
private WebEngine engineTableLogs;
@FXML
private WebView imageLoger;
private List<EnrolledUser> users;
private CsvParser logs;
private ArrayList<Log> filterLogs;
private ArrayList<Log> filterTableLogs;
private Chart viewchart;
private TableLog viewTableLog;
private ObservableList<model.Event> selectedEvents = null;
private ObservableList<EnrolledUser> selectedParticipants = null;
/**
* Shows the users enrolled in the course, as well as the activities
* the course is made up of.
*/
public void initialize(URL location, ResourceBundle resources) {
filterLogs = new ArrayList<>();
filterTableLogs = new ArrayList<>();
try {
logger.info(" Cargando curso '" + UBULog.getSession().getActualCourse().getFullName() + "'...");
viewchart = new Chart();
viewTableLog = new TableLog();
setDisableComponentInterfaz(true);
engineChart = chart.getEngine();
engineTableLogs = tableLogs.getEngine();
loadHTML(new ArrayList<Log>());
viewHTML();
// Establecemos los usuarios matriculados
CourseWS.setEnrolledUsers(UBULog.getSession().getToken(), UBULog.getSession().getActualCourse());
// Almacenamos todos los participantes en una lista
users = UBULog.getSession().getActualCourse().getEnrolledUsers();
// insertamos los usuarios ficticios.
insertUserFicticios();
enrList = FXCollections.observableArrayList(users);
listParticipants.setItems(enrList);
//////////////////////////////////////////////////////////////////////////
// Manejo de roles
manejoRoles();
//////////////////////////////////////////////////////////////////////////
// Manejo de grupos (MenuButton Grupo):
manejoGrupos();
// Mostramos número participantes
lblCountParticipants.setText("Participantes: " + users.size());
// Inicializamos el listener del textField de participantes
tfdParticipants.setOnAction(inputParticipant());
// Inicializamos el listener del textField del calificador
tfdEvents.setOnAction(inputEvent());
// Inicializamos el listener del textField filtros de la tabla log
tfdDate.setOnAction(inputTable());
tfdNameUser.setOnAction(inputTable());
tfdUserAffected.setOnAction(inputTable());
tfdContext.setOnAction(inputTable());
tfdComponent.setOnAction(inputTable());
tfdEvent.setOnAction(inputTable());
tfdDescription.setOnAction(inputTable());
tfdPOrigin.setOnAction(inputTable());
tfdIp.setOnAction(inputTable());
EventHandler<ActionEvent> actionChart = selectChart();
ArrayList<MenuItem> groupsItemsList = new ArrayList<>();
typeChart(actionChart, groupsItemsList, "Vertical", "bar");
typeChart(actionChart, groupsItemsList, "Hotizontal", "horizontalBar");
typeChart(actionChart, groupsItemsList, "Lineas basicas", "line");
// Assign the MenuItem list to the chart-type MenuButton
slcChart.getItems().addAll(groupsItemsList);
dataUserLoger();
} catch (Exception e) {
logger.error("Error en la inicialización. {}", e);
}
// Asignamos el manejador de eventos de la lista
// Al clickar en la lista, se recalcula el número de elementos
// seleccionados de participantes.
listParticipants.setOnMouseClicked(new EventHandler<Event>() {
@Override
public void handle(Event event) {
filterLogs();
}
});
// Asignamos el manejador de eventos de la lista
// Al clickar en la lista, se recalcula el número de elementos
// seleccionados de eventos.
listEvents.setOnMouseClicked(new EventHandler<Event>() {
@Override
public void handle(Event event) {
filterLogs();
}
});
}
/**
* Method to add a chart type to the selector.
*
* @param actionChart
*            event handler assigned to the menu item.
* @param groupsItemsList
*            list of menu items to add to.
* @param value
*            label shown in the menu.
* @param key
*            chart-type key used by the HTML chart.
*/
private void typeChart(EventHandler<ActionEvent> actionChart, ArrayList<MenuItem> groupsItemsList, String value,
String key) {
MenuItem mi = new MenuItem(value);
mi.setId(key);
mi.setOnAction(actionChart);
groupsItemsList.add(mi);
}
/**
* Reloads the chart and the log table.
*
* @param list
*            list of logs to display.
*/
private void loadHTML(List<Log> list) {
viewchart.generarGrafica();
engineChart.reload();
viewTableLog.generarTablaLogs(list);
engineTableLogs.reload();
}
/**
* Loads the HTML pages for the chart and the log table.
*/
private void viewHTML() {
engineChart.load(getClass().getResource("/chart/html/chart.html").toString());
engineTableLogs.load(getClass().getResource("/tablelogs/html/tablelogs.html").toString());
}
private void filterLogs() {
selectedEvents = listEvents.getSelectionModel().getSelectedItems();
selectedParticipants = listParticipants.getSelectionModel().getSelectedItems();
filterLogs.clear();
viewchart.getLabel().clear();
if (!selectedEvents.isEmpty()) {
for (model.Event actualEvent : selectedEvents) {
filterLogs.addAll(actualEvent.getLogsEvent());
}
if (!selectedParticipants.isEmpty()) {
boolean control = false;
ArrayList<Log> filterAux = new ArrayList<>();
filterAux.addAll(filterLogs);
for (Log actualLog : filterAux) {
for (EnrolledUser participant : selectedParticipants) {
if (actualLog.getUser().equals(participant)) {
control = true;
}
}
if (!control) {
filterLogs.remove(actualLog);
}
control = false;
}
}
} else {
for (EnrolledUser actualUser : selectedParticipants) {
for (Log actualLog : logs.getLogs()) {
if (actualLog.getUser().equals(actualUser)) {
filterLogs.add(actualLog);
}
}
}
}
viewchart.setLabel(selectedParticipants, selectedEvents, filterLogs);
loadHTML(filterLogs);
}
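// The nested loops above keep a log entry only when its user is among the selected participants.
// An equivalent, more direct formulation (illustrative sketch only, assuming the same equals()
// semantics on EnrolledUser):
//
// filterLogs.removeIf(log -> selectedParticipants.stream().noneMatch(p -> log.getUser().equals(p)));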
/**
* Sets the values of the labels that refer to the data of the
* logged-in user.
*/
private void dataUserLoger() {
WebEngine engineImagen;
lblActualUser.setText("Usuario: " + UBULog.getUser().getFullName());
lblActualCourse.setText("Curso actual: " + UBULog.getSession().getActualCourse().getFullName());
lblActualHost.setText("Host: " + UBULog.getHost());
engineImagen = imageLoger.getEngine();
engineImagen.load(UBULog.getUser().getProfileImageUrlSmall());
}
/**
* Creates users that are not enrolled in the course but may still
* have interactions in it (admin, guest, system, unknown).
*/
private void insertUserFicticios() {
EnrolledUser userCreate = new EnrolledUser("Administrador", 2);
userCreate.setlastName("Administrador");
users.add(userCreate);
userCreate = new EnrolledUser("Invitado", 1);
userCreate.setlastName("Invitado");
users.add(userCreate);
userCreate = new EnrolledUser("Sistema", 0);
userCreate.setlastName("Sistema");
users.add(userCreate);
userCreate = new EnrolledUser("Desconocido", -1);
userCreate.setlastName("Desconocido");
users.add(userCreate);
}
/**
* Sets up the group filter (populates the group MenuButton).
*/
private void manejoGrupos() {
EventHandler<ActionEvent> actionGroup = selectGroup();
// Cargamos una lista de los nombres de los grupos
List<String> groupsList = UBULog.getSession().getActualCourse().getGroups();
// Convertimos la lista a una lista de MenuItems para el MenuButton
ArrayList<MenuItem> groupsItemsList = new ArrayList<>();
// En principio mostrarán todos los usuarios en cualquier grupo
MenuItem mi = (new MenuItem(all));
// Añadimos el manejador de eventos al primer MenuItem
mi.setOnAction(actionGroup);
groupsItemsList.add(mi);
for (int i = 0; i < groupsList.size(); i++) {
String group = groupsList.get(i);
mi = (new MenuItem(group));
// Añadimos el manejador de eventos a cada MenuItem
mi.setOnAction(actionGroup);
groupsItemsList.add(mi);
}
// Asignamos la lista de MenuItems al MenuButton "Grupo"
slcGroup.getItems().addAll(groupsItemsList);
slcGroup.setText(all);
}
/**
* Sets up the role filter (populates the role MenuButton).
*/
private void manejoRoles() {
EventHandler<ActionEvent> actionRole = selectRole();
// Cargamos una lista con los nombres de los roles
List<String> rolesList = UBULog.getSession().getActualCourse().getRoles();
// Convertimos la lista a una lista de MenuItems para el MenuButton
ArrayList<MenuItem> rolesItemsList = new ArrayList<>();
// En principio se mostrarón todos los usuarios con cualquier rol
MenuItem mi = (new MenuItem(all));
// Añadimos el manejador de eventos al primer MenuItem
mi.setOnAction(actionRole);
rolesItemsList.add(mi);
for (int i = 0; i < rolesList.size(); i++) {
String rol = rolesList.get(i);
mi = (new MenuItem(rol));
mi.setOnAction(actionRole);
// Añadimos el manejador de eventos a cada MenuItem
rolesItemsList.add(mi);
}
// Asignamos la lista de MenuItems al MenuButton "Rol"
slcRole.getItems().addAll(rolesItemsList);
slcRole.setText(all);
}
/**
* Event handler for the role filter button. Returns an event handler
* for each menu item.
*
* @return role selection event handler
*/
private EventHandler<ActionEvent> selectRole() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
// Obtenemos el ítem que se ha seleccionado
MenuItem mItem = (MenuItem) event.getSource();
// Obtenemos el rol por el que se quiere filtrar
filterRole = mItem.getText();
logger.info("-> Filtrando participantes por rol: " + filterRole);
filterParticipants();
slcRole.setText(filterRole);
}
};
}
/**
* Event handler for the group filter button. Returns an event handler
* for each menu item.
*
* @return group selection event handler
*/
private EventHandler<ActionEvent> selectGroup() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
// Obtenemos el ítem que se ha seleccionado
MenuItem mItem = (MenuItem) event.getSource();
// Obtenemos el grupo por el que se quire filtrar
filterGroup = mItem.getText();
logger.info("-> Filtrando participantes por grupo: " + filterGroup);
filterParticipants();
slcGroup.setText(filterGroup);
}
};
}
/**
* Event handler for the participant filter text field.
*
* @return event handler for the participant name pattern
*/
private EventHandler<ActionEvent> inputParticipant() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
patternParticipants = tfdParticipants.getText();
logger.info("-> Filtrando participantes por nombre: " + patternParticipants);
filterParticipants();
}
};
}
/**
* Event handler for the chart selector button; selects a chart type.
*
* @return chart selection event handler
*/
private EventHandler<ActionEvent> selectChart() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y cambia el tipo de
* gráfico.
*/
public void handle(ActionEvent chart) {
MenuItem mItem = (MenuItem) chart.getSource();
viewchart.setTypeChart(mItem.getId());
viewchart.generarGrafica();
engineChart.reload();
}
};
}
/**
* Filters the participants by the selected role, group and name pattern.
*/
public void filterParticipants() {
try {
boolean roleYes;
boolean groupYes;
boolean patternYes;
users = UBULog.getSession().getActualCourse().getEnrolledUsers();
// Cargamos la lista de los roles
ArrayList<EnrolledUser> nameUsers = new ArrayList<>();
// Obtenemos los participantes que tienen el rol elegido
for (int i = 0; i < users.size(); i++) {
// Filtrado por rol:
roleYes = false;
List<Role> roles = users.get(i).getRoles();
// Si no tiene rol
if (roles == null || (roles.isEmpty() && filterRole.equals(all))) {
roleYes = true;
} else {
for (int j = 0; j < roles.size(); j++) {
// Comprobamos si el usuario pasa el filtro de "rol"
if (roles.get(j).getName().equals(filterRole) || filterRole.equals(all)) {
roleYes = true;
}
}
}
// Filtrado por grupo:
groupYes = false;
List<Group> groups = users.get(i).getGroups();
if (groups == null || (groups.isEmpty() && filterGroup.equals(all))) {
groupYes = true;
} else {
for (int k = 0; k < groups.size(); k++) {
// Comprobamos si el usuario pasa el filtro de "grupo"
if (groups.get(k).getName().equals(filterGroup) || filterGroup.equals(all)) {
groupYes = true;
}
}
}
// Filtrado por patrón:
patternYes = false;
if (patternParticipants.equals("")) {
patternYes = true;
} else {
Pattern pattern = Pattern.compile(patternParticipants);
Matcher match = pattern.matcher(users.get(i).getFullName());
if (match.find()) {
patternYes = true;
}
}
// Si el usuario se corresponde con los filtros
if (groupYes && roleYes && patternYes)
nameUsers.add(users.get(i));
}
enrList = FXCollections.observableArrayList(nameUsers);
// Show the new number of participants
lblCountParticipants.setText("Participantes: " + nameUsers.size());
} catch (Exception e) {
logger.error("Error en el filtro participantes. {}", e);
}
listParticipants.setItems(enrList);
}
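// The role/group/pattern checks above could also be written as a chain of predicates over the user
// list. Illustrative sketch only (it approximates, but does not exactly reproduce, the original
// handling of users without roles or groups; java.util.stream.Collectors would need to be imported):
//
// Pattern namePattern = Pattern.compile(patternParticipants.isEmpty() ? ".*" : patternParticipants);
// List<EnrolledUser> filtered = users.stream()
//         .filter(u -> filterRole.equals(all) || (u.getRoles() != null
//                 && u.getRoles().stream().anyMatch(r -> r.getName().equals(filterRole))))
//         .filter(u -> filterGroup.equals(all) || (u.getGroups() != null
//                 && u.getGroups().stream().anyMatch(g -> g.getName().equals(filterGroup))))
//         .filter(u -> namePattern.matcher(u.getFullName()).find())
//         .collect(Collectors.toList());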
public EventHandler<ActionEvent> inputTable() {
return new EventHandler<ActionEvent>() {
public void handle(ActionEvent event) {
ArrayList<String> patternFilter = new ArrayList<>();
patternFilter.add(tfdDate.getText());
patternFilter.add(tfdNameUser.getText().toUpperCase());
patternFilter.add(tfdUserAffected.getText().toUpperCase());
patternFilter.add(tfdContext.getText().toUpperCase());
patternFilter.add(tfdComponent.getText().toUpperCase());
patternFilter.add(tfdEvent.getText().toUpperCase());
patternFilter.add(tfdDescription.getText().toUpperCase());
patternFilter.add(tfdPOrigin.getText().toUpperCase());
patternFilter.add(tfdIp.getText());
logger.info("-> Filtrando tabla: \n Fecha :" + patternFilter.get(0) + "\n Usuario afectado: "
+ patternFilter.get(1) + "\n usuario afectado: " + patternFilter.get(2) + "\n contexto: "
+ patternFilter.get(3) + "\n Componente: " + patternFilter.get(4) + "\n Evento: "
+ patternFilter.get(5) + "\n Descripción: " + patternFilter.get(6) + "\n Origen: "
+ patternFilter.get(7) + "\n ip: " + patternFilter.get(8));
filterTable(patternFilter);
}
};
}
protected void filterTable(ArrayList<String> patternFilter) {
ArrayList<Boolean> patterncomp = new ArrayList<>();
for (int i = 0; i < 9; ++i) {
patterncomp.add(false);
}
filterTableLogs.clear();
try {
if (filterLogs.isEmpty()) {
// buscar en log completo
filtroTableLogs(patternFilter, patterncomp, logs.getLogs());
} else {
// buscar en filterlog
filtroTableLogs(patternFilter, patterncomp, filterLogs);
}
viewTableLog.generarTablaLogs(filterTableLogs);
engineTableLogs.reload();
} catch (Exception e) {
logger.error("Error en el filtro de tabla. {}", e);
}
}
/**
* Filters the logs shown in the table.
*
* @param patternFilter
*            list with the content entered in each column filter
* @param patterncomp
*            list of booleans, one per column, used as match flags
* @param list
*            logs to filter
*/
private void filtroTableLogs(ArrayList<String> patternFilter, ArrayList<Boolean> patterncomp, List<Log> list) {
for (int i = 0; i < list.size(); i++) {
for (int j = 0; j < patternFilter.size(); j++) {
if (patternFilter.get(j).equals("")) {
patterncomp.set(j, true);
} else {
Pattern pattern = Pattern.compile(patternFilter.get(j));
Matcher match = null;
switch (j) {
case 0:
match = pattern.matcher(list.get(i).getDate().get(Calendar.DAY_OF_MONTH) + "/"
+ (list.get(i).getDate().get(Calendar.MONTH) + 1) + "/"
+ list.get(i).getDate().get(Calendar.YEAR) + " "
+ list.get(i).getDate().get(Calendar.HOUR_OF_DAY) + ":"
+ list.get(i).getDate().get(Calendar.MINUTE));
break;
case 1:
match = pattern.matcher(list.get(i).getNameUser().toUpperCase());
break;
case 2:
match = pattern.matcher(list.get(i).getUserAffected().toUpperCase());
break;
case 3:
match = pattern.matcher(list.get(i).getContext().toUpperCase());
break;
case 4:
match = pattern.matcher(list.get(i).getComponent().toUpperCase());
break;
case 5:
match = pattern.matcher(list.get(i).getEvent().toUpperCase());
break;
case 6:
match = pattern.matcher(list.get(i).getDescription().toUpperCase());
break;
case 7:
match = pattern.matcher(list.get(i).getOrigin().toUpperCase());
break;
case 8:
match = pattern.matcher(list.get(i).getIp().toUpperCase());
break;
default:
match = pattern.matcher("*");
break;
}
if (match.find()) {
patterncomp.set(j, true);
}
}
}
if (patterncomp.get(0) && patterncomp.get(1) && patterncomp.get(2) && patterncomp.get(3)
&& patterncomp.get(4) && patterncomp.get(5) && patterncomp.get(6) && patterncomp.get(7)
&& patterncomp.get(8)) {
filterTableLogs.add(list.get(i));
}
for (int k = 0; k < patterncomp.size(); k++) {
patterncomp.set(k, false);
}
}
}
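// The switch above maps each filter column to one field of the Log entry. The same mapping could be
// kept in a single list of accessors, removing the per-column indices. Illustrative sketch only
// (java.util.function.Function and java.util.Arrays would need to be imported; formatDate is a
// hypothetical helper rendering the Calendar the same way as case 0 above):
//
// List<Function<Log, String>> columns = Arrays.asList(
//         l -> formatDate(l.getDate()),
//         l -> l.getNameUser().toUpperCase(),
//         l -> l.getUserAffected().toUpperCase(),
//         l -> l.getContext().toUpperCase(),
//         l -> l.getComponent().toUpperCase(),
//         l -> l.getEvent().toUpperCase(),
//         l -> l.getDescription().toUpperCase(),
//         l -> l.getOrigin().toUpperCase(),
//         l -> l.getIp().toUpperCase());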
/**
* Event handler for the event filter text field. Returns an event
* handler for each item.
*
* @return event handler for the event name pattern
*/
public EventHandler<ActionEvent> inputEvent() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un TreeItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
patternEvents = tfdEvents.getText();
logger.info("-> Filtrando calificador por nombre: " + patternEvents);
filterEvents();
}
};
}
/**
* Filters the event list according to the pattern entered.
*/
public void filterEvents() {
try {
boolean patternYes;
ArrayList<model.Event> filterevents = new ArrayList<>();
eventList = FXCollections.observableArrayList(logs.getEvents().values());
// Obtenemos los participantes que tienen el rol elegido
for (int i = 0; i < eventList.size(); i++) {
				// Filter by pattern:
patternYes = false;
if (patternEvents.equals("")) {
patternYes = true;
} else {
Pattern pattern = Pattern.compile(patternEvents);
Matcher match = pattern.matcher(eventList.get(i).getNameEvent());
if (match.find()) {
patternYes = true;
}
}
				// If the event matches the filter
if (patternYes)
filterevents.add(eventList.get(i));
}
eventList = FXCollections.observableArrayList(filterevents);
} catch (Exception e) {
logger.error("Error en filtro de eventos {}", e);
}
listEvents.setItems(eventList);
}
	/**
	 * Changes the current course and loads another one.
	 */
public void changeCourse() {
logger.info("Cambiando de asignatura...");
changeView("/view/Welcome.fxml");
logger.info("Accediendo a UBULog...");
}
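	/**
	 * Clears the current selections, filters, chart data and loaded logs
	 * before another course or session is loaded.
	 */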
private void clearData() {
listParticipants.getSelectionModel().clearSelection();
listEvents.getSelectionModel().clearSelection();
filterLogs.clear();
viewchart.getDate().clear();
viewchart.getLabel().clear();
logs = null;
users.clear();
filterTableLogs.clear();
loadHTML(new ArrayList<>());
}
	/**
	 * Exports the chart. The user can choose between the .png and .jpg formats
	 * to save the image.
	 */
public void saveChart() {
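		// Take a snapshot of the chart WebView and ask the user where to save it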
WritableImage image = chart.snapshot(new SnapshotParameters(), null);
File file = new File("chart.png");
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Guardar gráfico");
fileChooser.setInitialFileName("chart");
fileChooser.setInitialDirectory(file.getParentFile());
fileChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter(".png", "*.*"),
new FileChooser.ExtensionFilter("*.jpg", "*.jpg"), new FileChooser.ExtensionFilter("*.png", "*.png"));
try {
file = fileChooser.showSaveDialog(UBULog.getStage());
if (file != null) {
ImageIO.write(SwingFXUtils.fromFXImage(image, null), "png", file);
}
} catch (Exception e) {
logger.error("Error en guardado de gráfico. {}", e);
}
}
	/**
	 * Generates the chart from the currently filtered table data.
	 */
public void generateChart() {
viewchart.setLabel(selectedParticipants, selectedEvents, filterTableLogs);
viewchart.generarGrafica();
engineChart.reload();
}
	/**
	 * Returns to the user login window.
	 */
public void logOut() {
logger.info("Cerrando sesión de usuario");
changeView("/view/Login.fxml");
}
	/**
	 * Switches the window when the user changes course or logs out.
	 *
	 * @param resource
	 *            path of the FXML view to load
	 */
private void changeView(String resource) {
try {
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource(resource));
UBULog.getStage().close();
UBULog.setStage(new Stage());
Parent root = loader.load();
Scene scene = new Scene(root);
UBULog.getStage().setScene(scene);
UBULog.getStage().getIcons().add(new Image("/img/logo_min.png"));
UBULog.getStage().setTitle("UBULog");
UBULog.getStage().show();
clearData();
} catch (IOException e) {
logger.error("Error al cambiar asignatira. {}", e);
}
}
	/**
	 * Clears the participant/event selection and resets the chart.
	 */
	public void clearSelection() {
		if (logs != null && !logs.getLogs().isEmpty()) {
			listParticipants.getSelectionModel().clearSelection();
			listEvents.getSelectionModel().clearSelection();
			filterLogs.clear();
			viewchart.getDate().clear();
			viewchart.getLabel().clear();
			loadHTML(logs.getLogs());
		}
	}
	/**
	 * Opens the project repository in the browser.
	 */
public void aboutUBULog() {
try {
Desktop.getDesktop().browse(new URL("https://github.com/trona85/GII-17.1B-UBULog-1.0").toURI());
} catch (Exception e) {
logger.error("Error al abrir GigHub. {}", e);
}
}
	/**
	 * Button handler that loads a log document from a local CSV file.
	 */
public void cargaDocumento() {
try {
this.logs = new CsvParser();
FileChooser fileChooser = new FileChooser();
File file = fileChooser.showOpenDialog(UBULog.getStage());
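			// Abort if the user cancelled the dialog or did not pick a CSV file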
if (file == null) {
throw new UBULogException(UBULogError.FICHERO_CANCELADO);
}
if (!file.toString().contains(".csv")) {
throw new UBULogException(UBULogError.FICHERO_NO_VALIDO);
}
Alert alert = modalOpen();
logs.setFile(file.toString());
logs.readDocument();
initializeDataSet(logs);
alert.close();
} catch (UBULogException e) {
logger.info(e.getMessage());
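			// Ask for a file again unless the user explicitly cancelled the dialog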
if (e.getError() != UBULogError.FICHERO_CANCELADO) {
cargaDocumento();
}
}
}
	/**
	 * Shows a modal dialog while the course log is being loaded.
	 *
	 * @return the alert shown
	 */
private Alert modalOpen() {
Alert alert = new Alert(AlertType.INFORMATION);
alert.setHeight(300);
alert.setWidth(300);
alert.initModality(Modality.APPLICATION_MODAL);
alert.initOwner(UBULog.getStage());
alert.getDialogPane().setContentText("Se esta cargando el registro de la asignatura:\n"
+ UBULog.getSession().getActualCourse().getFullName() + "\nPuede tardar unos minutos");
alert.show();
return alert;
}
	/**
	 * Button handler that downloads the course log and loads it.
	 *
	 * @throws IOException
	 *             if the temporary CSV file cannot be written or deleted
	 */
public void cargaDocumentoOnline() throws IOException {
this.logs = new CsvParser();
WebScripting webScripting = null;
PrintWriter pw = null;
File file = null;
try {
Alert alert = modalOpen();
webScripting = new WebScripting();
webScripting.getResponsiveWeb();
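			// Write the downloaded content to a temporary CSV file so it can be
			// parsed like a local document; the file is deleted afterwards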
try (FileWriter fileWriter = new FileWriter("./tempcsv.csv")) {
pw = new PrintWriter(fileWriter);
pw.print(webScripting.getResponsive());
} finally {
if (pw != null) {
pw.close();
}
}
file = new File("tempcsv.csv");
logs.setFile(file.getAbsolutePath());
logs.readDocument();
initializeDataSet(logs);
alert.close();
Files.delete(file.toPath());
} catch (FailingHttpStatusCodeException e) {
logger.error(e.getMessage());
} catch (UBULogException e) {
logger.info(e.getMessage());
} finally {
if (webScripting != null) {
webScripting.close();
}
}
}
	/**
	 * Initializes the data needed once a log document has been parsed.
	 *
	 * @param logs
	 *            parsed log document
	 */
private void initializeDataSet(CsvParser logs) {
setDisableComponentInterfaz(false);
listParticipants.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
listEvents.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
		// Show the list of participants and events
eventList = FXCollections.observableArrayList(logs.getEvents().values());
Collections.sort(eventList, (o1, o2) -> o1.getNameEvent().compareTo(o2.getNameEvent()));
listEvents.setItems(eventList);
loadHTML(logs.getLogs());
}
	/**
	 * Enables or disables the filtering controls of the interface.
	 *
	 * @param disable
	 *            true to disable the controls, false to enable them
	 */
private void setDisableComponentInterfaz(boolean disable) {
listParticipants.setDisable(disable);
tfdParticipants.setDisable(disable);
tfdEvents.setDisable(disable);
listEvents.setDisable(disable);
btnchart.setDisable(disable);
tfdDate.setDisable(disable);
tfdNameUser.setDisable(disable);
tfdUserAffected.setDisable(disable);
tfdContext.setDisable(disable);
tfdComponent.setDisable(disable);
tfdEvent.setDisable(disable);
tfdDescription.setDisable(disable);
tfdPOrigin.setDisable(disable);
tfdIp.setDisable(disable);
}
	/**
	 * "Exit" button. Closes the application.
	 */
public void closeApplication() {
logger.info("Cerrando aplicación");
UBULog.getStage().close();
}
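	/**
	 * Shows a modal error dialog warning that the Internet connection has been
	 * lost and offers the option of closing UBULog.
	 */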
public static void errorDeConexion() {
Alert alert = new Alert(AlertType.ERROR);
alert.initModality(Modality.APPLICATION_MODAL);
alert.initOwner(UBULog.getStage());
alert.getDialogPane().setContentText("Su equipo ha perdido la conexión a Internet");
logger.warn("Su equipo ha perdido la conexión a Internet");
ButtonType buttonSalir = new ButtonType("Cerrar UBULog");
alert.getButtonTypes().setAll(buttonSalir);
		Optional<ButtonType> result = alert.showAndWait();
		if (result.isPresent() && result.get() == buttonSalir) {
			UBULog.getStage().close();
		}
}
} | src/main/java/controllers/MainController.java | /**
*
*/
package controllers;
import java.awt.Desktop;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.imageio.ImageIO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.SnapshotParameters;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.control.ListView;
import javafx.scene.control.MenuButton;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SelectionMode;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.WritableImage;
import javafx.scene.web.WebEngine;
import javafx.scene.web.WebView;
import javafx.stage.FileChooser;
import javafx.stage.Modality;
import javafx.stage.Stage;
import model.Chart;
import model.EnrolledUser;
import model.Group;
import model.Log;
import model.Role;
import model.TableLog;
import parserdocument.CsvParser;
import ubulogexception.UBULogError;
import ubulogexception.UBULogException;
import webservice.CourseWS;
/**
* Clase controlador de la ventana principal
*
* @author Oscar Fernández Armengol
* @author Claudia Martínez Herrero
*
* @version 1.1
*/
public class MainController implements Initializable {
static final Logger logger = LoggerFactory.getLogger(MainController.class);
private String all = "Todos";
@FXML // Curso actual
public Label lblActualCourse;
@FXML // Usuario actual
public Label lblActualUser;
@FXML // Host actual
public Label lblActualHost;
@FXML // Numero de participantes
public Label lblCountParticipants;
@FXML // lista de participantes
public ListView<EnrolledUser> listParticipants;
ObservableList<EnrolledUser> enrList;
@FXML // lista de eventos
public ListView<model.Event> listEvents;
ObservableList<model.Event> eventList;
@FXML // Botón filtro por rol
public MenuButton slcRole;
MenuItem[] roleMenuItems;
String filterRole = all;
@FXML // Botón filtro por grupo
public MenuButton slcGroup;
MenuItem[] groupMenuItems;
String filterGroup = all;
@FXML // Botón selector gráfico
public MenuButton slcChart;
@FXML // Entrada de filtro de usuarios por patrón
public TextField tfdParticipants;
String patternParticipants = "";
@FXML // Entrada de filtro de usuarios por patrón
public Button btnchart;
@FXML // Entrada de filtro de actividades por patrón
public TextField tfdEvents;
String patternEvents = "";
@FXML // Entrada de filtro de actividades por patrón
public TextField tfdDate;
String patternDate = "";
@FXML
public TextField tfdNameUser;
String patternNameUser = "";
@FXML
public TextField tfdUserAffected;
String patternUserAffected = "";
@FXML
public TextField tfdContext;
String patternContext = "";
@FXML
public TextField tfdComponent;
String patternComponent = "";
@FXML
public TextField tfdEvent;
String patternEvent = "";
@FXML
public TextField tfdDescription;
String patternDescription = "";
@FXML
public TextField tfdPOrigin;
String patternOrigin = "";
@FXML
public TextField tfdIp;
String patternIp = "";
@FXML // chart ,imagen log
private WebView chart;
private WebEngine engineChart;
@FXML // chart ,imagen log
private WebView tableLogs;
private WebEngine engineTableLogs;
@FXML
private WebView imageLoger;
private List<EnrolledUser> users;
private CsvParser logs;
private ArrayList<Log> filterLogs;
private ArrayList<Log> filterTableLogs;
private Chart viewchart;
private TableLog viewTableLog;
private ObservableList<model.Event> selectedEvents = null;
private ObservableList<EnrolledUser> selectedParticipants = null;
/**
* Muestra los usuarios matriculados en el curso, así como las actividades
* de las que se compone.
*/
public void initialize(URL location, ResourceBundle resources) {
filterLogs = new ArrayList<>();
filterTableLogs = new ArrayList<>();
try {
logger.info(" Cargando curso '" + UBULog.getSession().getActualCourse().getFullName() + "'...");
viewchart = new Chart();
viewTableLog = new TableLog();
setDisableComponentInterfaz(true);
engineChart = chart.getEngine();
engineTableLogs = tableLogs.getEngine();
loadHTML(new ArrayList<Log>());
viewHTML();
// Establecemos los usuarios matriculados
CourseWS.setEnrolledUsers(UBULog.getSession().getToken(), UBULog.getSession().getActualCourse());
// Almacenamos todos los participantes en una lista
users = UBULog.getSession().getActualCourse().getEnrolledUsers();
// insertamos los usuarios ficticios.
insertUserFicticios();
enrList = FXCollections.observableArrayList(users);
listParticipants.setItems(enrList);
//////////////////////////////////////////////////////////////////////////
// Manejo de roles
manejoRoles();
//////////////////////////////////////////////////////////////////////////
// Manejo de grupos (MenuButton Grupo):
manejoGrupos();
// Mostramos número participantes
lblCountParticipants.setText("Participantes: " + users.size());
// Inicializamos el listener del textField de participantes
tfdParticipants.setOnAction(inputParticipant());
// Inicializamos el listener del textField del calificador
tfdEvents.setOnAction(inputEvent());
// Inicializamos el listener del textField filtros de la tabla log
tfdDate.setOnAction(inputTable());
tfdNameUser.setOnAction(inputTable());
tfdUserAffected.setOnAction(inputTable());
tfdContext.setOnAction(inputTable());
tfdComponent.setOnAction(inputTable());
tfdEvent.setOnAction(inputTable());
tfdDescription.setOnAction(inputTable());
tfdPOrigin.setOnAction(inputTable());
tfdIp.setOnAction(inputTable());
EventHandler<ActionEvent> actionChart = selectChart();
ArrayList<MenuItem> groupsItemsList = new ArrayList<>();
typeChart(actionChart, groupsItemsList, "Vertical", "bar");
typeChart(actionChart, groupsItemsList, "Hotizontal", "horizontalBar");
typeChart(actionChart, groupsItemsList, "Lineas basicas", "line");
// Asignamos la lista de MenuItems al MenuButton "Grupo"
slcChart.getItems().addAll(groupsItemsList);
dataUserLoger();
} catch (Exception e) {
logger.error("Error en la inicialización. {}", e);
}
// Asignamos el manejador de eventos de la lista
// Al clickar en la lista, se recalcula el número de elementos
// seleccionados de participantes.
listParticipants.setOnMouseClicked(new EventHandler<Event>() {
@Override
public void handle(Event event) {
filterLogs();
}
});
// Asignamos el manejador de eventos de la lista
// Al clickar en la lista, se recalcula el número de elementos
// seleccionados de eventos.
listEvents.setOnMouseClicked(new EventHandler<Event>() {
@Override
public void handle(Event event) {
filterLogs();
}
});
}
/**
* Metodo para añadir tipo de gráfico al selector.
*
* @param actionChart,
* actionChart.
* @param groupsItemsList,
* groupsItemsList.
* @param value,
* valor.
* @param key,
* clave.
*/
private void typeChart(EventHandler<ActionEvent> actionChart, ArrayList<MenuItem> groupsItemsList, String value,
String key) {
MenuItem mi = new MenuItem(value);
mi.setId(key);
mi.setOnAction(actionChart);
groupsItemsList.add(mi);
}
/**
* Método que recarga de nuevo el gráfico y la tabla.
*
* @param list
* , lista de log.
*/
private void loadHTML(List<Log> list) {
viewchart.generarGrafica();
engineChart.reload();
viewTableLog.generarTablaLogs(list);
engineTableLogs.reload();
}
/**
* Método que carga los html de la gráfica y la tabla de logs.
*/
private void viewHTML() {
engineChart.load(getClass().getResource("/chart/html/chart.html").toString());
engineTableLogs.load(getClass().getResource("/tablelogs/html/tablelogs.html").toString());
}
private void filterLogs() {
selectedEvents = listEvents.getSelectionModel().getSelectedItems();
selectedParticipants = listParticipants.getSelectionModel().getSelectedItems();
filterLogs.clear();
viewchart.getLabel().clear();
if (!selectedEvents.isEmpty()) {
for (model.Event actualEvent : selectedEvents) {
filterLogs.addAll(actualEvent.getLogsEvent());
}
if (!selectedParticipants.isEmpty()) {
boolean control = false;
ArrayList<Log> filterAux = new ArrayList<>();
filterAux.addAll(filterLogs);
for (Log actualLog : filterAux) {
for (EnrolledUser participant : selectedParticipants) {
if (actualLog.getUser().equals(participant)) {
control = true;
}
}
if (!control) {
filterLogs.remove(actualLog);
}
control = false;
}
}
} else {
for (EnrolledUser actualUser : selectedParticipants) {
for (Log actualLog : logs.getLogs()) {
if (actualLog.getUser().equals(actualUser)) {
filterLogs.add(actualLog);
}
}
}
}
viewchart.setLabel(selectedParticipants, selectedEvents, filterLogs);
loadHTML(filterLogs);
}
/**
* Establecemos los valores de los lavel que hacen referencia a los datos
* del usuario logeado.
*/
private void dataUserLoger() {
WebEngine engineImagen;
lblActualUser.setText("Usuario: " + UBULog.getUser().getFullName());
lblActualCourse.setText("Curso actual: " + UBULog.getSession().getActualCourse().getFullName());
lblActualHost.setText("Host: " + UBULog.getHost());
engineImagen = imageLoger.getEngine();
engineImagen.load(UBULog.getUser().getProfileImageUrlSmall());
}
/**
* Método para crear usuarios que no estan inscritos al curso, pero pueden
* tener interacciones en el.
*/
private void insertUserFicticios() {
EnrolledUser userCreate = new EnrolledUser("Administrador", 2);
userCreate.setlastName("Administrador");
users.add(userCreate);
userCreate = new EnrolledUser("Invitado", 1);
userCreate.setlastName("Invitado");
users.add(userCreate);
userCreate = new EnrolledUser("Sistema", 0);
userCreate.setlastName("Sistema");
users.add(userCreate);
userCreate = new EnrolledUser("Desconocido", -1);
userCreate.setlastName("Desconocido");
users.add(userCreate);
}
/**
*
*/
private void manejoGrupos() {
EventHandler<ActionEvent> actionGroup = selectGroup();
// Cargamos una lista de los nombres de los grupos
List<String> groupsList = UBULog.getSession().getActualCourse().getGroups();
// Convertimos la lista a una lista de MenuItems para el MenuButton
ArrayList<MenuItem> groupsItemsList = new ArrayList<>();
// En principio mostrarán todos los usuarios en cualquier grupo
MenuItem mi = (new MenuItem(all));
// Añadimos el manejador de eventos al primer MenuItem
mi.setOnAction(actionGroup);
groupsItemsList.add(mi);
for (int i = 0; i < groupsList.size(); i++) {
String group = groupsList.get(i);
mi = (new MenuItem(group));
// Añadimos el manejador de eventos a cada MenuItem
mi.setOnAction(actionGroup);
groupsItemsList.add(mi);
}
// Asignamos la lista de MenuItems al MenuButton "Grupo"
slcGroup.getItems().addAll(groupsItemsList);
slcGroup.setText(all);
}
/**
* Manejo de roles
*/
private void manejoRoles() {
EventHandler<ActionEvent> actionRole = selectRole();
// Cargamos una lista con los nombres de los roles
List<String> rolesList = UBULog.getSession().getActualCourse().getRoles();
// Convertimos la lista a una lista de MenuItems para el MenuButton
ArrayList<MenuItem> rolesItemsList = new ArrayList<>();
// En principio se mostrarón todos los usuarios con cualquier rol
MenuItem mi = (new MenuItem(all));
// Añadimos el manejador de eventos al primer MenuItem
mi.setOnAction(actionRole);
rolesItemsList.add(mi);
for (int i = 0; i < rolesList.size(); i++) {
String rol = rolesList.get(i);
mi = (new MenuItem(rol));
mi.setOnAction(actionRole);
// Añadimos el manejador de eventos a cada MenuItem
rolesItemsList.add(mi);
}
// Asignamos la lista de MenuItems al MenuButton "Rol"
slcRole.getItems().addAll(rolesItemsList);
slcRole.setText(all);
}
/**
* Manejador de eventos para el botón de filtro por roles. Devuelve un
* manejador de eventos para cada item.
*
* @return manejador de eventos de roles
*/
private EventHandler<ActionEvent> selectRole() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
// Obtenemos el ítem que se ha seleccionado
MenuItem mItem = (MenuItem) event.getSource();
// Obtenemos el rol por el que se quiere filtrar
filterRole = mItem.getText();
logger.info("-> Filtrando participantes por rol: " + filterRole);
filterParticipants();
slcRole.setText(filterRole);
}
};
}
/**
* Manejador de eventos para el botón de filtro por grupos. Devuelve un
* manejador de eventos para cada item.
*
* @return manejador de eventos de grupos
*/
private EventHandler<ActionEvent> selectGroup() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
// Obtenemos el ítem que se ha seleccionado
MenuItem mItem = (MenuItem) event.getSource();
// Obtenemos el grupo por el que se quire filtrar
filterGroup = mItem.getText();
logger.info("-> Filtrando participantes por grupo: " + filterGroup);
filterParticipants();
slcGroup.setText(filterGroup);
}
};
}
/**
* Manejador de eventos para el textField de filtro de participantes.
*
* @return manejador de eventos para el patrón de participantes
*/
private EventHandler<ActionEvent> inputParticipant() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
patternParticipants = tfdParticipants.getText();
logger.info("-> Filtrando participantes por nombre: " + patternParticipants);
filterParticipants();
}
};
}
/**
* Manejador de eventos para el botón selector de gráficos, selecciona un
* tipo de gráfico.
*
* @return manejador de eventos de grupos
*/
private EventHandler<ActionEvent> selectChart() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un MenuItem) y responde en
* consecuencia. El usuario elige un menuItem y cambia el tipo de
* gráfico.
*/
public void handle(ActionEvent chart) {
MenuItem mItem = (MenuItem) chart.getSource();
viewchart.setTypeChart(mItem.getId());
viewchart.generarGrafica();
engineChart.reload();
}
};
}
/**
* Filtra los participantes según el rol, el grupo y el patrón indicados
*/
public void filterParticipants() {
try {
boolean roleYes;
boolean groupYes;
boolean patternYes;
users = UBULog.getSession().getActualCourse().getEnrolledUsers();
// Cargamos la lista de los roles
ArrayList<EnrolledUser> nameUsers = new ArrayList<>();
// Obtenemos los participantes que tienen el rol elegido
for (int i = 0; i < users.size(); i++) {
// Filtrado por rol:
roleYes = false;
List<Role> roles = users.get(i).getRoles();
// Si no tiene rol
if (roles == null || (roles.isEmpty() && filterRole.equals(all))) {
roleYes = true;
} else {
for (int j = 0; j < roles.size(); j++) {
// Comprobamos si el usuario pasa el filtro de "rol"
if (roles.get(j).getName().equals(filterRole) || filterRole.equals(all)) {
roleYes = true;
}
}
}
// Filtrado por grupo:
groupYes = false;
List<Group> groups = users.get(i).getGroups();
if (groups == null || (groups.isEmpty() && filterGroup.equals(all))) {
groupYes = true;
} else {
for (int k = 0; k < groups.size(); k++) {
// Comprobamos si el usuario pasa el filtro de "grupo"
if (groups.get(k).getName().equals(filterGroup) || filterGroup.equals(all)) {
groupYes = true;
}
}
}
// Filtrado por patrón:
patternYes = false;
if (patternParticipants.equals("")) {
patternYes = true;
} else {
Pattern pattern = Pattern.compile(patternParticipants);
Matcher match = pattern.matcher(users.get(i).getFullName());
if (match.find()) {
patternYes = true;
}
}
// Si el usuario se corresponde con los filtros
if (groupYes && roleYes && patternYes)
nameUsers.add(users.get(i));
}
enrList = FXCollections.observableArrayList(nameUsers);
// Mostramos nievo número participantes
lblCountParticipants.setText("Participantes: " + nameUsers.size());
} catch (Exception e) {
logger.error("Error en el filtro participantes. {}", e);
}
listParticipants.setItems(enrList);
}
public EventHandler<ActionEvent> inputTable() {
return new EventHandler<ActionEvent>() {
public void handle(ActionEvent event) {
ArrayList<String> patternFilter = new ArrayList<>();
patternFilter.add(tfdDate.getText());
patternFilter.add(tfdNameUser.getText().toUpperCase());
patternFilter.add(tfdUserAffected.getText().toUpperCase());
patternFilter.add(tfdContext.getText().toUpperCase());
patternFilter.add(tfdComponent.getText().toUpperCase());
patternFilter.add(tfdEvent.getText().toUpperCase());
patternFilter.add(tfdDescription.getText().toUpperCase());
patternFilter.add(tfdPOrigin.getText().toUpperCase());
patternFilter.add(tfdIp.getText());
logger.info("-> Filtrando tabla: \n Fecha :" + patternFilter.get(0) + "\n Usuario afectado: "
+ patternFilter.get(1) + "\n usuario afectado: " + patternFilter.get(2) + "\n contexto: "
+ patternFilter.get(3) + "\n Componente: " + patternFilter.get(4) + "\n Evento: "
+ patternFilter.get(5) + "\n Descripción: " + patternFilter.get(6) + "\n Origen: "
+ patternFilter.get(7) + "\n ip: " + patternFilter.get(8));
filterTable(patternFilter);
}
};
}
protected void filterTable(ArrayList<String> patternFilter) {
ArrayList<Boolean> patterncomp = new ArrayList<>();
for (int i = 0; i < 9; ++i) {
patterncomp.add(false);
}
filterTableLogs.clear();
try {
if (filterLogs.isEmpty()) {
// buscar en log completo
filtroTableLogs(patternFilter, patterncomp, logs.getLogs());
} else {
// buscar en filterlog
filtroTableLogs(patternFilter, patterncomp, filterLogs);
}
viewTableLog.generarTablaLogs(filterTableLogs);
engineTableLogs.reload();
} catch (Exception e) {
logger.error("Error en el filtro de tabla. {}", e);
}
}
/**
* Método que filtra los log de la tabla.
*
* @param patternFilter,
* Lista del contenido puesto en los filtros
* @param patterncomp,
* lista de booleanos.
* @param ftLogs,
* logs filtrados.
*/
private void filtroTableLogs(ArrayList<String> patternFilter, ArrayList<Boolean> patterncomp, List<Log> list) {
for (int i = 0; i < list.size(); i++) {
for (int j = 0; j < patternFilter.size(); j++) {
if (patternFilter.get(j).equals("")) {
patterncomp.set(j, true);
} else {
Pattern pattern = Pattern.compile(patternFilter.get(j));
Matcher match = null;
switch (j) {
case 0:
match = pattern.matcher(list.get(i).getDate().get(Calendar.DAY_OF_MONTH) + "/"
+ (list.get(i).getDate().get(Calendar.MONTH) + 1) + "/"
+ list.get(i).getDate().get(Calendar.YEAR) + " "
+ list.get(i).getDate().get(Calendar.HOUR_OF_DAY) + ":"
+ list.get(i).getDate().get(Calendar.MINUTE));
break;
case 1:
match = pattern.matcher(list.get(i).getNameUser().toUpperCase());
break;
case 2:
match = pattern.matcher(list.get(i).getUserAffected().toUpperCase());
break;
case 3:
match = pattern.matcher(list.get(i).getContext().toUpperCase());
break;
case 4:
match = pattern.matcher(list.get(i).getComponent().toUpperCase());
break;
case 5:
match = pattern.matcher(list.get(i).getEvent().toUpperCase());
break;
case 6:
match = pattern.matcher(list.get(i).getDescription().toUpperCase());
break;
case 7:
match = pattern.matcher(list.get(i).getOrigin().toUpperCase());
break;
case 8:
match = pattern.matcher(list.get(i).getIp().toUpperCase());
break;
default:
match = pattern.matcher("*");
break;
}
if (match.find()) {
patterncomp.set(j, true);
}
}
}
if (patterncomp.get(0) && patterncomp.get(1) && patterncomp.get(2) && patterncomp.get(3)
&& patterncomp.get(4) && patterncomp.get(5) && patterncomp.get(6) && patterncomp.get(7)
&& patterncomp.get(8)) {
filterTableLogs.add(list.get(i));
}
for (int k = 0; k < patterncomp.size(); k++) {
patterncomp.set(k, false);
}
}
}
/**
* Manejador de eventos para las actividades. Devuelve un manejador de
* eventos para cada item.
*
* @return manejador de eventos para las actividades
*/
public EventHandler<ActionEvent> inputEvent() {
return new EventHandler<ActionEvent>() {
/**
* Recibe un evento (relacionado con un TreeItem) y responde en
* consecuencia. El usuario elige un menuItem y filtra la lista de
* participantes
*/
public void handle(ActionEvent event) {
patternEvents = tfdEvents.getText();
logger.info("-> Filtrando calificador por nombre: " + patternEvents);
filterEvents();
}
};
}
/**
* Filtra la lista de actividades del calificador según el tipo y el patrón
* introducidos.
*/
public void filterEvents() {
try {
boolean patternYes;
ArrayList<model.Event> filterevents = new ArrayList<>();
eventList = FXCollections.observableArrayList(logs.getEvents().values());
// Obtenemos los participantes que tienen el rol elegido
for (int i = 0; i < eventList.size(); i++) {
// Filtrado por patrón:
patternYes = false;
if (patternEvents.equals("")) {
patternYes = true;
} else {
Pattern pattern = Pattern.compile(patternEvents);
Matcher match = pattern.matcher(eventList.get(i).getNameEvent());
if (match.find()) {
patternYes = true;
}
}
// Si el usuario se corresponde con los filtros
if (patternYes)
filterevents.add(eventList.get(i));
}
eventList = FXCollections.observableArrayList(filterevents);
} catch (Exception e) {
logger.error("Error en filtro de eventos {}", e);
}
listEvents.setItems(eventList);
}
/**
* Cambia la asignatura actual y carga otra
*
* @throws Exception
* , escepción.
*/
public void changeCourse() {
try {
logger.info("Cambiando de asignatura...");
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("/view/Welcome.fxml"));
UBULog.getStage().close();
logger.info("Accediendo a UBULog...");
UBULog.setStage(new Stage());
Parent root = loader.load();
Scene scene = new Scene(root);
UBULog.getStage().setScene(scene);
UBULog.getStage().getIcons().add(new Image("/img/logo_min.png"));
UBULog.getStage().setTitle("UBULog");
UBULog.getStage().show();
clearData();
} catch (IOException e) {
logger.error("Error al cambiar asignatira. {}", e);
}
}
private void clearData() {
listParticipants.getSelectionModel().clearSelection();
listEvents.getSelectionModel().clearSelection();
filterLogs.clear();
viewchart.getDate().clear();
viewchart.getLabel().clear();
logs = null;
users.clear();
filterTableLogs.clear();
loadHTML(new ArrayList<>());
}
/**
* Exporta el gráfico. El usuario podrá elegir entre el formato .png o .jpg
* para guardar la imagen.
*
* @throws Exception
* excepción
*/
public void saveChart() {
WritableImage image = chart.snapshot(new SnapshotParameters(), null);
File file = new File("chart.png");
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Guardar gráfico");
fileChooser.setInitialFileName("chart");
fileChooser.setInitialDirectory(file.getParentFile());
fileChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter(".png", "*.*"),
new FileChooser.ExtensionFilter("*.jpg", "*.jpg"), new FileChooser.ExtensionFilter("*.png", "*.png"));
try {
file = fileChooser.showSaveDialog(UBULog.getStage());
if (file != null) {
ImageIO.write(SwingFXUtils.fromFXImage(image, null), "png", file);
}
} catch (Exception e) {
logger.error("Error en guardado de gráfico. {}", e);
}
}
/**
* Método para generar gráfica dependiente de la tabla.
*/
public void generateChart() {
viewchart.setLabel(selectedParticipants, selectedEvents, filterTableLogs);
viewchart.generarGrafica();
engineChart.reload();
}
/**
* Vuelve a la ventana de login de usuario
*
* @throws Exception
* excepción
*/
public void logOut() {
try {
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("/view/Login.fxml"));
UBULog.getStage().close();
logger.info("Cerrando sesión de usuario");
UBULog.setStage(new Stage());
Parent root = loader.load();
Scene scene = new Scene(root);
UBULog.getStage().setScene(scene);
UBULog.getStage().getIcons().add(new Image("/img/logo_min.png"));
UBULog.getStage().setTitle("UBULog");
UBULog.getStage().show();
clearData();
} catch (IOException e) {
logger.error("Error al deslogearse. {}", e);
}
}
/**
* Deja de seleccionar los participantes/actividades y borra el gráfico.
*
* @param actionEvent,
* acción del evento.
* @throws Exception
* excepción
*/
public void clearSelection() {
if (!logs.getLogs().isEmpty()) {
listParticipants.getSelectionModel().clearSelection();
listEvents.getSelectionModel().clearSelection();
filterLogs.clear();
viewchart.getDate().clear();
viewchart.getLabel().clear();
loadHTML(logs.getLogs());
}
}
/**
* Abre en el navegador el repositorio del proyecto.
*
* @throws Exception
* excepción
*/
public void aboutUBULog() {
try {
Desktop.getDesktop().browse(new URL("https://github.com/trona85/GII-17.1B-UBULog-1.0").toURI());
} catch (Exception e) {
logger.error("Error al abrir GigHub. {}", e);
}
}
/**
* Boton para cargar documento
*
* @param actionEvent,
* acción del evento.
*/
public void cargaDocumento() {
try {
this.logs = new CsvParser();
FileChooser fileChooser = new FileChooser();
File file = fileChooser.showOpenDialog(UBULog.getStage());
if (file == null) {
throw new UBULogException(UBULogError.FICHERO_CANCELADO);
}
if (!file.toString().contains(".csv")) {
throw new UBULogException(UBULogError.FICHERO_NO_VALIDO);
}
Alert alert = modalOpen();
logs.setFile(file.toString());
logs.readDocument();
initializeDataSet(logs);
alert.close();
} catch (UBULogException e) {
logger.info(e.getMessage());
if (e.getError() != UBULogError.FICHERO_CANCELADO) {
cargaDocumento();
}
}
}
/**
* Método que carga un modal.
*
* @return alert.
*/
private Alert modalOpen() {
Alert alert = new Alert(AlertType.INFORMATION);
alert.setHeight(300);
alert.setWidth(300);
alert.initModality(Modality.APPLICATION_MODAL);
alert.initOwner(UBULog.getStage());
alert.getDialogPane().setContentText("Se esta cargando el registro de la asignatura:\n"
+ UBULog.getSession().getActualCourse().getFullName() + "\nPuede tardar unos minutos");
alert.show();
return alert;
}
/**
* Boton para cargar documento online.
*
* @throws IOException
* excepción
*/
public void cargaDocumentoOnline() throws IOException {
this.logs = new CsvParser();
WebScripting webScripting = null;
PrintWriter pw = null;
File file = null;
try {
Alert alert = modalOpen();
webScripting = new WebScripting();
webScripting.getResponsiveWeb();
try (FileWriter fileWriter = new FileWriter("./tempcsv.csv")) {
pw = new PrintWriter(fileWriter);
pw.print(webScripting.getResponsive());
} finally {
if (pw != null) {
pw.close();
}
}
file = new File("tempcsv.csv");
logs.setFile(file.getAbsolutePath());
logs.readDocument();
initializeDataSet(logs);
alert.close();
Files.delete(file.toPath());
} catch (FailingHttpStatusCodeException e) {
logger.error(e.getMessage());
} catch (UBULogException e) {
logger.info(e.getMessage());
} finally {
if (webScripting != null) {
webScripting.close();
}
}
}
/**
* Inicializamos los datos necesarios.
*
* @param logs,
* logs.
*/
private void initializeDataSet(CsvParser logs) {
setDisableComponentInterfaz(false);
listParticipants.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
listEvents.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
/// Mostramos la lista de participantes y eventos
eventList = FXCollections.observableArrayList(logs.getEvents().values());
Collections.sort(eventList, (o1, o2) -> o1.getNameEvent().compareTo(o2.getNameEvent()));
listEvents.setItems(eventList);
loadHTML(logs.getLogs());
}
/**
* Método para desactivar o activar botones de la interfaz.
*
* @param disable,
* booleano.
*
*/
private void setDisableComponentInterfaz(boolean disable) {
listParticipants.setDisable(disable);
tfdParticipants.setDisable(disable);
tfdEvents.setDisable(disable);
listEvents.setDisable(disable);
btnchart.setDisable(disable);
tfdDate.setDisable(disable);
tfdNameUser.setDisable(disable);
tfdUserAffected.setDisable(disable);
tfdContext.setDisable(disable);
tfdComponent.setDisable(disable);
tfdEvent.setDisable(disable);
tfdDescription.setDisable(disable);
tfdPOrigin.setDisable(disable);
tfdIp.setDisable(disable);
}
/**
*
* Botón "Salir". Cierra la aplicación.
*
* @param actionEvent,
* acción del evento.
* @throws Exception
* excepción
*/
public void closeApplication() {
logger.info("Cerrando aplicación");
UBULog.getStage().close();
}
public static void errorDeConexion() {
Alert alert = new Alert(AlertType.ERROR);
alert.initModality(Modality.APPLICATION_MODAL);
alert.initOwner(UBULog.getStage());
alert.getDialogPane().setContentText("Su equipo ha perdido la conexión a Internet");
logger.warn("Su equipo ha perdido la conexión a Internet");
ButtonType buttonSalir = new ButtonType("Cerrar UBULog");
alert.getButtonTypes().setAll(buttonSalir);
Optional<ButtonType> result = alert.showAndWait();
if (result.get() == buttonSalir)
UBULog.getStage().close();
}
} | Refactirización de método.
[#17]
| src/main/java/controllers/MainController.java | Refactirización de método. [#17] | <ide><path>rc/main/java/controllers/MainController.java
<ide> import java.io.FileWriter;
<ide> import java.io.IOException;
<ide> import java.io.PrintWriter;
<del>import java.net.MalformedURLException;
<del>import java.net.URISyntaxException;
<ide> import java.net.URL;
<ide> import java.nio.file.Files;
<ide> import java.util.ArrayList;
<ide> * , escepción.
<ide> */
<ide> public void changeCourse() {
<del> try {
<del>
<del> logger.info("Cambiando de asignatura...");
<del> FXMLLoader loader = new FXMLLoader();
<del> loader.setLocation(getClass().getResource("/view/Welcome.fxml"));
<del> UBULog.getStage().close();
<del> logger.info("Accediendo a UBULog...");
<del> UBULog.setStage(new Stage());
<del>
<del> Parent root = loader.load();
<del> Scene scene = new Scene(root);
<del> UBULog.getStage().setScene(scene);
<del> UBULog.getStage().getIcons().add(new Image("/img/logo_min.png"));
<del> UBULog.getStage().setTitle("UBULog");
<del> UBULog.getStage().show();
<del>
<del> clearData();
<del> } catch (IOException e) {
<del> logger.error("Error al cambiar asignatira. {}", e);
<del> }
<add>
<add> logger.info("Cambiando de asignatura...");
<add> changeView("/view/Welcome.fxml");
<add> logger.info("Accediendo a UBULog...");
<ide>
<ide> }
<ide>
<ide> * excepción
<ide> */
<ide> public void logOut() {
<add> logger.info("Cerrando sesión de usuario");
<add> changeView("/view/Login.fxml");
<add>
<add> }
<add>
<add> /**
<add> * Metodo que cambia la ventana cuando cambias de asignatura o te deslogeas.
<add> *
<add> * @throws IOException
<add> * excepción.
<add> */
<add> private void changeView(String resource) {
<ide> try {
<ide> FXMLLoader loader = new FXMLLoader();
<del> loader.setLocation(getClass().getResource("/view/Login.fxml"));
<add> loader.setLocation(getClass().getResource(resource));
<ide> UBULog.getStage().close();
<del> logger.info("Cerrando sesión de usuario");
<ide> UBULog.setStage(new Stage());
<ide> Parent root = loader.load();
<ide> Scene scene = new Scene(root);
<ide>
<ide> clearData();
<ide> } catch (IOException e) {
<del> logger.error("Error al deslogearse. {}", e);
<add> logger.error("Error al cambiar asignatira. {}", e);
<ide> }
<ide> }
<ide>
<ide> * excepción
<ide> */
<ide> public void clearSelection() {
<del> if (!logs.getLogs().isEmpty()) {
<del> listParticipants.getSelectionModel().clearSelection();
<del> listEvents.getSelectionModel().clearSelection();
<del> filterLogs.clear();
<del> viewchart.getDate().clear();
<del> viewchart.getLabel().clear();
<del>
<del> loadHTML(logs.getLogs());
<del> }
<add> if (logs != null)
<add> if (!logs.getLogs().isEmpty()) {
<add> listParticipants.getSelectionModel().clearSelection();
<add> listEvents.getSelectionModel().clearSelection();
<add> filterLogs.clear();
<add> viewchart.getDate().clear();
<add> viewchart.getLabel().clear();
<add>
<add> loadHTML(logs.getLogs());
<add> }
<ide>
<ide> }
<ide> |
|
Java | apache-2.0 | c4483f5ee1b86d573e97683ef716a357d461cb85 | 0 | jagguli/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,clumsy/intellij-community,ibinti/intellij-community,consulo/consulo,idea4bsd/idea4bsd,xfournet/intellij-community,allotria/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,apixandru/intellij-community,supersven/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,apixandru/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,allotria/intellij-community,slisson/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,da1z/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,signed/intellij-community,fitermay/intellij-community,diorcety/intellij-community,holmes/intellij-community,ryano144/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,slisson/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,caot/intellij-community,slisson/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,adedayo/intellij-community,clumsy/intellij-community,jagguli/intellij-community,semonte/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,izonder/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,izonder/intellij-community,asedunov/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,ernestp/consulo,fitermay/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,kdwink/intellij-community,adedayo/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,g
nuhub/intellij-community,xfournet/intellij-community,slisson/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,izonder/intellij-community,slisson/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,xfournet/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,signed/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,holmes/intellij-community,semonte/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,signed/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,ernestp/consulo,dslomov/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,allotria/intellij-community,youdonghai/intellij-community,kool79/intellij-community,caot/intellij-community,ernestp/consulo,hurricup/intellij-community,adedayo/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,holmes/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,kool79/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,FHannes/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,robovm/robovm-studio,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,petteyg/intellij-community,petteyg/intellij-community,signed/intellij-community,slisson/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,fnouama/intellij-community,kdwink/intellij-community,apixandru/intellij-community,da1z/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,adedayo/intellij-community,holmes/intellij-community,kool79/intellij-community,samthor/intellij-community,semonte/intel
lij-community,lucafavatella/intellij-community,diorcety/intellij-community,amith01994/intellij-community,ernestp/consulo,da1z/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,semonte/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,kool79/intellij-community,blademainer/intellij-community,holmes/intellij-community,amith01994/intellij-community,supersven/intellij-community,gnuhub/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,FHannes/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,samthor/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,semonte/intellij-community,Lekanich/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,hurricup/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,izonder/intellij-community,robovm/robovm-studio,xfournet/intellij-community,allotria/intellij-community,FHannes/intellij-community,vladmm/intellij-community,kool79/intellij-community,samthor/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,consulo/consulo,TangHao1987/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,amith01994/intellij-community,semonte/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,dslomov/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,ibinti/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,consulo/consulo,allotria/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,caot/intellij-community,supersven/intellij-community,vvv1559/intellij-community,consulo/consulo,mglukhikh/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,caot/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,sal
guarnieri/intellij-community,signed/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,vladmm/intellij-community,retomerz/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,samthor/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,apixandru/intellij-community,signed/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,da1z/intellij-community,retomerz/intellij-community,kdwink/intellij-community,supersven/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,izonder/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,caot/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,supersven/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,adedayo/intellij-community,retomerz/intellij-community,holmes/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,orekyuu/intellij-community,supersven/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,allotria/intellij-community,signed/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,ernestp/consulo,adedayo/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,asedunov/intellij-community,apixandru/intellij-community,diorcety/intellij-community,samthor/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,nicolargo/intellij-communit
y,Lekanich/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,consulo/consulo,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,FHannes/intellij-community,clumsy/intellij-community,slisson/intellij-community,fnouama/intellij-community,slisson/intellij-community,jagguli/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,supersven/intellij-community,samthor/intellij-community,blademainer/intellij-community,da1z/intellij-community,hurricup/intellij-community,supersven/intellij-community,clumsy/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,izonder/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,amith01994/intellij-community,caot/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,allotria/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,holmes/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,adedayo/intellij-community,kool79/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,robovm/robovm-studio,dslomov/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,retomerz/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,semonte/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,su
ncycheng/intellij-community,blademainer/intellij-community,apixandru/intellij-community,clumsy/intellij-community,caot/intellij-community,asedunov/intellij-community,caot/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,hurricup/intellij-community,supersven/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,samthor/intellij-community,vvv1559/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,asedunov/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,diorcety/intellij-community,supersven/intellij-community,fitermay/intellij-community,dslomov/intellij-community,fitermay/intellij-community,ryano144/intellij-community,hurricup/intellij-community,diorcety/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,da1z/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,ryano144/intellij-community,fnouama/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,samthor/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,caot/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,ryano144/intellij-community,dslomov/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,izonder/intellij-community,blademainer/intellij-community,kool79/intellij-community,kdwink/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,da1z/intellij-community,holmes/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,xfournet/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,pwoodworth/i
ntellij-community,xfournet/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,youdonghai/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.mac;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.PathChooserDialog;
import com.intellij.openapi.fileChooser.impl.FileChooserUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.impl.IdeMenuBar;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.sun.jna.Callback;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.util.*;
import java.util.List;
/**
* @author spleaner
*/
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
public class MacFileChooserDialogImpl implements PathChooserDialog {
private static final int OK = 1;
private static final Map<ID, MacFileChooserDialogImpl> ourImplMap = new HashMap<ID, MacFileChooserDialogImpl>(2);
private final FileChooserDescriptor myChooserDescriptor;
private final Project myProject;
private Consumer<List<VirtualFile>> myCallback;
private static final Callback SHOULD_ENABLE_URL = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID url) {
return true;
}
};
/*
private static final Callback SHOULD_SHOW_FILENAME_CALLBACK = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID filename) {
if (filename == null || filename.intValue() == 0) return false;
final String fileName = Foundation.toStringViaUTF8(filename);
if (fileName == null) return false;
final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(fileName);
return virtualFile == null || (virtualFile.isDirectory() || myChooserDescriptor.isFileSelectable(virtualFile));
}
};
private static final Callback IS_VALID_FILENAME_CALLBACK = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID filename) {
if (filename == null || filename.intValue() == 0) return false;
final String fileName = Foundation.toStringViaUTF8(filename);
if (fileName == null) return false;
final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(fileName);
return virtualFile == null || (!virtualFile.isDirectory() || myChooserDescriptor.isFileSelectable(virtualFile));
}
};
*/
private static final Callback OPEN_PANEL_DID_END = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public void callback(ID self, String selector, ID openPanelDidEnd, ID returnCode, ID contextInfo) {
final MacFileChooserDialogImpl impl = ourImplMap.remove(self);
try {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
final IdeMenuBar bar = getMenuBar();
if (bar != null) {
bar.enableUpdates();
}
}
});
final List<String> resultPaths = processResult(returnCode, openPanelDidEnd);
if (resultPaths.size() > 0) {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
final List<VirtualFile> files = getChosenFiles(resultPaths);
if (files.size() > 0) {
FileChooserUtil.setLastOpenedFile(impl.myProject, files.get(files.size() - 1));
impl.myCallback.consume(files);
}
}
});
} else if (impl.myCallback instanceof FileChooser.FileChooserConsumer) {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
((FileChooser.FileChooserConsumer)impl.myCallback).cancelled();
}
});
}
}
finally {
Foundation.cfRelease(self);
}
}
};
@NotNull
private static List<String> processResult(final ID result, final ID panel) {
final List<String> resultPaths = new ArrayList<String>();
if (result != null && OK == result.intValue()) {
final ID fileNamesArray = invoke(panel, "filenames");
final ID enumerator = invoke(fileNamesArray, "objectEnumerator");
while (true) {
final ID filename = invoke(enumerator, "nextObject");
if (filename == null || 0 == filename.intValue()) break;
final String path = Foundation.toStringViaUTF8(filename);
if (path != null) {
resultPaths.add(path);
}
}
}
return resultPaths;
}
@NotNull
private static List<VirtualFile> getChosenFiles(final List<String> paths) {
if (paths == null || paths.size() == 0) return Collections.emptyList();
final LocalFileSystem fs = LocalFileSystem.getInstance();
final List<VirtualFile> files = ContainerUtil.newArrayListWithExpectedSize(paths.size());
for (String path : paths) {
final String vfsPath = FileUtil.toSystemIndependentName(path);
final VirtualFile file = fs.refreshAndFindFileByPath(vfsPath);
if (file != null && file.isValid()) {
files.add(file);
}
}
return files;
}
private static final Callback MAIN_THREAD_RUNNABLE = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public void callback(ID self, String selector, ID toSelect) {
final ID nsOpenPanel = Foundation.getObjcClass("NSOpenPanel");
final ID chooser = invoke(nsOpenPanel, "openPanel");
final FileChooserDescriptor chooserDescriptor = ourImplMap.get(self).myChooserDescriptor;
invoke(chooser, "setPrompt:", Foundation.nsString("Choose"));
invoke(chooser, "setCanChooseFiles:", chooserDescriptor.isChooseFiles() || chooserDescriptor.isChooseJars());
invoke(chooser, "setCanChooseDirectories:", chooserDescriptor.isChooseFolders());
invoke(chooser, "setAllowsMultipleSelection:", chooserDescriptor.isChooseMultiple());
invoke(chooser, "setTreatsFilePackagesAsDirectories:", chooserDescriptor.isChooseFolders());
if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("setCanCreateDirectories:"))) {
invoke(chooser, "setCanCreateDirectories:", true);
}
else if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("_setIncludeNewFolderButton:"))) {
invoke(chooser, "_setIncludeNewFolderButton:", true);
}
final Boolean showHidden = chooserDescriptor.getUserData(PathChooserDialog.NATIVE_MAC_CHOOSER_SHOW_HIDDEN_FILES);
if (Boolean.TRUE.equals(showHidden) || Registry.is("ide.mac.file.chooser.show.hidden.files")) {
if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("setShowsHiddenFiles:"))) {
invoke(chooser, "setShowsHiddenFiles:", true);
}
}
invoke(chooser, "setDelegate:", self);
ID directory = null;
ID file = null;
final String toSelectPath = toSelect == null || toSelect.intValue() == 0 ? null : Foundation.toStringViaUTF8(toSelect);
if (toSelectPath != null) {
final File toSelectFile = new File(toSelectPath);
if (toSelectFile.isDirectory()) {
directory = toSelect;
}
else if (toSelectFile.isFile()) {
directory = Foundation.nsString(toSelectFile.getParent());
file = Foundation.nsString(toSelectFile.getName());
}
}
ID types = null;
if (!chooserDescriptor.isChooseFiles() && chooserDescriptor.isChooseJars()) {
types = invoke("NSArray", "arrayWithObject:", Foundation.nsString("jar"));
}
final Window activeWindow = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
if (activeWindow != null) {
String activeWindowTitle = null;
if (activeWindow instanceof Frame) {
activeWindowTitle = ((Frame)activeWindow).getTitle();
}
else if (activeWindow instanceof JDialog) {
activeWindowTitle = ((JDialog)activeWindow).getTitle();
}
final ID focusedWindow = MacUtil.findWindowForTitle(activeWindowTitle);
if (focusedWindow != null) {
invoke(chooser, "beginSheetForDirectory:file:types:modalForWindow:modalDelegate:didEndSelector:contextInfo:",
directory, file, types, focusedWindow, self, Foundation.createSelector("openPanelDidEnd:returnCode:contextInfo:"), null);
}
}
}
};
static {
final ID delegate = Foundation.allocateObjcClassPair(Foundation.getObjcClass("NSObject"), "NSOpenPanelDelegate_");
//if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:shouldShowFilename:"), SHOULD_SHOW_FILENAME_CALLBACK, "B*")) {
// throw new RuntimeException("Unable to add method to objective-c delegate class!");
//}
//if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:isValidFilename:"), IS_VALID_FILENAME_CALLBACK, "B*")) {
// throw new RuntimeException("Unable to add method to objective-c delegate class!");
//}
if (!Foundation.addMethod(delegate, Foundation.createSelector("showOpenPanel:"), MAIN_THREAD_RUNNABLE, "v*")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
if (!Foundation.addMethod(delegate, Foundation.createSelector("openPanelDidEnd:returnCode:contextInfo:"), OPEN_PANEL_DID_END, "v*i")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:shouldEnableURL:"), SHOULD_ENABLE_URL, "B@@")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
Foundation.registerObjcClassPair(delegate);
}
public MacFileChooserDialogImpl(@NotNull final FileChooserDescriptor chooserDescriptor, final Project project) {
myChooserDescriptor = chooserDescriptor;
myProject = project;
}
@Override
public void choose(@Nullable final VirtualFile toSelect, @NotNull final Consumer<List<VirtualFile>> callback) {
myCallback = callback;
final VirtualFile lastOpenedFile = FileChooserUtil.getLastOpenedFile(myProject);
final VirtualFile selectFile = FileChooserUtil.getFileToSelect(myChooserDescriptor, myProject, toSelect, lastOpenedFile);
final String selectPath = selectFile != null ? FileUtil.toSystemDependentName(selectFile.getPath()) : null;
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
showNativeChooserAsSheet(MacFileChooserDialogImpl.this, selectPath);
}
});
}
@Nullable
private static IdeMenuBar getMenuBar() {
Window cur = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
while (cur != null) {
if (cur instanceof JFrame) {
final JMenuBar menuBar = ((JFrame)cur).getJMenuBar();
if (menuBar instanceof IdeMenuBar) {
return (IdeMenuBar)menuBar;
}
}
cur = cur.getOwner();
}
return null;
}
private static void showNativeChooserAsSheet(@NotNull final MacFileChooserDialogImpl impl, @Nullable final String toSelect) {
final IdeMenuBar bar = getMenuBar();
if (bar != null) {
bar.disableUpdates();
}
final ID autoReleasePool = createAutoReleasePool();
try {
final ID delegate = invoke(Foundation.getObjcClass("NSOpenPanelDelegate_"), "new");
Foundation.cfRetain(delegate);
ourImplMap.put(delegate, impl);
final ID select = toSelect == null ? null : Foundation.nsString(toSelect);
invoke(delegate, "performSelectorOnMainThread:withObject:waitUntilDone:", Foundation.createSelector("showOpenPanel:"), select, false);
}
finally {
invoke(autoReleasePool, "release");
}
}
private static ID createAutoReleasePool() {
return invoke("NSAutoreleasePool", "new");
}
private static ID invoke(@NotNull final String className, @NotNull final String selector, Object... args) {
return invoke(Foundation.getObjcClass(className), selector, args);
}
private static ID invoke(@NotNull final ID id, @NotNull final String selector, Object... args) {
return Foundation.invoke(id, Foundation.createSelector(selector), args);
}
}
| platform/platform-impl/src/com/intellij/ui/mac/MacFileChooserDialogImpl.java | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.mac;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.PathChooserDialog;
import com.intellij.openapi.fileChooser.impl.FileChooserUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.impl.IdeMenuBar;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import com.sun.jna.Callback;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.util.*;
import java.util.List;
/**
* @author spleaner
*/
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod")
public class MacFileChooserDialogImpl implements PathChooserDialog {
private static final int OK = 1;
private static final Map<ID, MacFileChooserDialogImpl> ourImplMap = new HashMap<ID, MacFileChooserDialogImpl>(2);
private final FileChooserDescriptor myChooserDescriptor;
private final Project myProject;
private Consumer<List<VirtualFile>> myCallback;
private static final Callback SHOULD_ENABLE_URL = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID url) {
return true;
}
};
/*
private static final Callback SHOULD_SHOW_FILENAME_CALLBACK = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID filename) {
if (filename == null || filename.intValue() == 0) return false;
final String fileName = Foundation.toStringViaUTF8(filename);
if (fileName == null) return false;
final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(fileName);
return virtualFile == null || (virtualFile.isDirectory() || myChooserDescriptor.isFileSelectable(virtualFile));
}
};
private static final Callback IS_VALID_FILENAME_CALLBACK = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public boolean callback(ID self, String selector, ID panel, ID filename) {
if (filename == null || filename.intValue() == 0) return false;
final String fileName = Foundation.toStringViaUTF8(filename);
if (fileName == null) return false;
final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(fileName);
return virtualFile == null || (!virtualFile.isDirectory() || myChooserDescriptor.isFileSelectable(virtualFile));
}
};
*/
private static final Callback OPEN_PANEL_DID_END = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public void callback(ID self, String selector, ID openPanelDidEnd, ID returnCode, ID contextInfo) {
final MacFileChooserDialogImpl impl = ourImplMap.remove(self);
try {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
final IdeMenuBar bar = getMenuBar();
if (bar != null) {
bar.enableUpdates();
}
}
});
final List<String> resultPaths = processResult(returnCode, openPanelDidEnd);
if (resultPaths.size() > 0) {
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
final List<VirtualFile> files = getChosenFiles(resultPaths);
if (files.size() > 0) {
FileChooserUtil.setLastOpenedFile(impl.myProject, files.get(files.size() - 1));
impl.myCallback.consume(files);
}
}
});
} else if (impl.myCallback instanceof FileChooser.FileChooserConsumer) {
((FileChooser.FileChooserConsumer)impl.myCallback).cancelled();
}
}
finally {
Foundation.cfRelease(self);
}
}
};
@NotNull
private static List<String> processResult(final ID result, final ID panel) {
final List<String> resultPaths = new ArrayList<String>();
if (result != null && OK == result.intValue()) {
final ID fileNamesArray = invoke(panel, "filenames");
final ID enumerator = invoke(fileNamesArray, "objectEnumerator");
while (true) {
final ID filename = invoke(enumerator, "nextObject");
if (filename == null || 0 == filename.intValue()) break;
final String path = Foundation.toStringViaUTF8(filename);
if (path != null) {
resultPaths.add(path);
}
}
}
return resultPaths;
}
@NotNull
private static List<VirtualFile> getChosenFiles(final List<String> paths) {
if (paths == null || paths.size() == 0) return Collections.emptyList();
final LocalFileSystem fs = LocalFileSystem.getInstance();
final List<VirtualFile> files = ContainerUtil.newArrayListWithExpectedSize(paths.size());
for (String path : paths) {
final String vfsPath = FileUtil.toSystemIndependentName(path);
final VirtualFile file = fs.refreshAndFindFileByPath(vfsPath);
if (file != null && file.isValid()) {
files.add(file);
}
}
return files;
}
private static final Callback MAIN_THREAD_RUNNABLE = new Callback() {
@SuppressWarnings("UnusedDeclaration")
public void callback(ID self, String selector, ID toSelect) {
final ID nsOpenPanel = Foundation.getObjcClass("NSOpenPanel");
final ID chooser = invoke(nsOpenPanel, "openPanel");
final FileChooserDescriptor chooserDescriptor = ourImplMap.get(self).myChooserDescriptor;
invoke(chooser, "setPrompt:", Foundation.nsString("Choose"));
invoke(chooser, "setCanChooseFiles:", chooserDescriptor.isChooseFiles() || chooserDescriptor.isChooseJars());
invoke(chooser, "setCanChooseDirectories:", chooserDescriptor.isChooseFolders());
invoke(chooser, "setAllowsMultipleSelection:", chooserDescriptor.isChooseMultiple());
invoke(chooser, "setTreatsFilePackagesAsDirectories:", chooserDescriptor.isChooseFolders());
if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("setCanCreateDirectories:"))) {
invoke(chooser, "setCanCreateDirectories:", true);
}
else if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("_setIncludeNewFolderButton:"))) {
invoke(chooser, "_setIncludeNewFolderButton:", true);
}
final Boolean showHidden = chooserDescriptor.getUserData(PathChooserDialog.NATIVE_MAC_CHOOSER_SHOW_HIDDEN_FILES);
if (Boolean.TRUE.equals(showHidden) || Registry.is("ide.mac.file.chooser.show.hidden.files")) {
if (Foundation.isClassRespondsToSelector(nsOpenPanel, Foundation.createSelector("setShowsHiddenFiles:"))) {
invoke(chooser, "setShowsHiddenFiles:", true);
}
}
invoke(chooser, "setDelegate:", self);
ID directory = null;
ID file = null;
final String toSelectPath = toSelect == null || toSelect.intValue() == 0 ? null : Foundation.toStringViaUTF8(toSelect);
if (toSelectPath != null) {
final File toSelectFile = new File(toSelectPath);
if (toSelectFile.isDirectory()) {
directory = toSelect;
}
else if (toSelectFile.isFile()) {
directory = Foundation.nsString(toSelectFile.getParent());
file = Foundation.nsString(toSelectFile.getName());
}
}
ID types = null;
if (!chooserDescriptor.isChooseFiles() && chooserDescriptor.isChooseJars()) {
types = invoke("NSArray", "arrayWithObject:", Foundation.nsString("jar"));
}
final Window activeWindow = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
if (activeWindow != null) {
String activeWindowTitle = null;
if (activeWindow instanceof Frame) {
activeWindowTitle = ((Frame)activeWindow).getTitle();
}
else if (activeWindow instanceof JDialog) {
activeWindowTitle = ((JDialog)activeWindow).getTitle();
}
final ID focusedWindow = MacUtil.findWindowForTitle(activeWindowTitle);
if (focusedWindow != null) {
invoke(chooser, "beginSheetForDirectory:file:types:modalForWindow:modalDelegate:didEndSelector:contextInfo:",
directory, file, types, focusedWindow, self, Foundation.createSelector("openPanelDidEnd:returnCode:contextInfo:"), null);
}
}
}
};
static {
final ID delegate = Foundation.allocateObjcClassPair(Foundation.getObjcClass("NSObject"), "NSOpenPanelDelegate_");
//if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:shouldShowFilename:"), SHOULD_SHOW_FILENAME_CALLBACK, "B*")) {
// throw new RuntimeException("Unable to add method to objective-c delegate class!");
//}
//if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:isValidFilename:"), IS_VALID_FILENAME_CALLBACK, "B*")) {
// throw new RuntimeException("Unable to add method to objective-c delegate class!");
//}
if (!Foundation.addMethod(delegate, Foundation.createSelector("showOpenPanel:"), MAIN_THREAD_RUNNABLE, "v*")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
if (!Foundation.addMethod(delegate, Foundation.createSelector("openPanelDidEnd:returnCode:contextInfo:"), OPEN_PANEL_DID_END, "v*i")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
if (!Foundation.addMethod(delegate, Foundation.createSelector("panel:shouldEnableURL:"), SHOULD_ENABLE_URL, "B@@")) {
throw new RuntimeException("Unable to add method to objective-c delegate class!");
}
Foundation.registerObjcClassPair(delegate);
}
public MacFileChooserDialogImpl(@NotNull final FileChooserDescriptor chooserDescriptor, final Project project) {
myChooserDescriptor = chooserDescriptor;
myProject = project;
}
@Override
public void choose(@Nullable final VirtualFile toSelect, @NotNull final Consumer<List<VirtualFile>> callback) {
myCallback = callback;
final VirtualFile lastOpenedFile = FileChooserUtil.getLastOpenedFile(myProject);
final VirtualFile selectFile = FileChooserUtil.getFileToSelect(myChooserDescriptor, myProject, toSelect, lastOpenedFile);
final String selectPath = selectFile != null ? FileUtil.toSystemDependentName(selectFile.getPath()) : null;
//noinspection SSBasedInspection
SwingUtilities.invokeLater(new Runnable() {
public void run() {
showNativeChooserAsSheet(MacFileChooserDialogImpl.this, selectPath);
}
});
}
@Nullable
private static IdeMenuBar getMenuBar() {
Window cur = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
while (cur != null) {
if (cur instanceof JFrame) {
final JMenuBar menuBar = ((JFrame)cur).getJMenuBar();
if (menuBar instanceof IdeMenuBar) {
return (IdeMenuBar)menuBar;
}
}
cur = cur.getOwner();
}
return null;
}
private static void showNativeChooserAsSheet(@NotNull final MacFileChooserDialogImpl impl, @Nullable final String toSelect) {
final IdeMenuBar bar = getMenuBar();
if (bar != null) {
bar.disableUpdates();
}
final ID autoReleasePool = createAutoReleasePool();
try {
final ID delegate = invoke(Foundation.getObjcClass("NSOpenPanelDelegate_"), "new");
Foundation.cfRetain(delegate);
ourImplMap.put(delegate, impl);
final ID select = toSelect == null ? null : Foundation.nsString(toSelect);
invoke(delegate, "performSelectorOnMainThread:withObject:waitUntilDone:", Foundation.createSelector("showOpenPanel:"), select, false);
}
finally {
invoke(autoReleasePool, "release");
}
}
private static ID createAutoReleasePool() {
return invoke("NSAutoreleasePool", "new");
}
private static ID invoke(@NotNull final String className, @NotNull final String selector, Object... args) {
return invoke(Foundation.getObjcClass(className), selector, args);
}
private static ID invoke(@NotNull final ID id, @NotNull final String selector, Object... args) {
return Foundation.invoke(id, Foundation.createSelector(selector), args);
}
}
| should invoke later
| platform/platform-impl/src/com/intellij/ui/mac/MacFileChooserDialogImpl.java | should invoke later | <ide><path>latform/platform-impl/src/com/intellij/ui/mac/MacFileChooserDialogImpl.java
<ide> }
<ide> });
<ide> } else if (impl.myCallback instanceof FileChooser.FileChooserConsumer) {
<del> ((FileChooser.FileChooserConsumer)impl.myCallback).cancelled();
<add> //noinspection SSBasedInspection
<add> SwingUtilities.invokeLater(new Runnable() {
<add> public void run() {
<add> ((FileChooser.FileChooserConsumer)impl.myCallback).cancelled();
<add> }
<add> });
<ide> }
<ide> }
<ide> finally { |
|
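The diff above ("should invoke later") moves the cancellation notification out of the native panel callback and onto the Swing event dispatch thread. A minimal, self-contained sketch of that deferral pattern, using a hypothetical consumer interface rather than the real FileChooser.FileChooserConsumer API:

import javax.swing.SwingUtilities;

/** Illustrative only: defer a cancellation callback to the EDT instead of invoking it inline. */
public class DeferredCancellationSketch {

    /** Hypothetical stand-in for the consumer type touched by the commit. */
    interface CancellableConsumer {
        void cancelled();
    }

    static void notifyCancelled(final CancellableConsumer consumer) {
        // Queue the callback on the event dispatch thread; the caller may be a
        // native or background thread that should not drive UI listeners directly.
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                consumer.cancelled();
            }
        });
    }

    public static void main(String[] args) {
        notifyCancelled(new CancellableConsumer() {
            @Override
            public void cancelled() {
                System.out.println("cancelled on EDT: " + SwingUtilities.isEventDispatchThread());
            }
        });
    }
}

The anonymous Runnable matches the pre-lambda style of the surrounding code; on Java 8+ the same call could be written as SwingUtilities.invokeLater(consumer::cancelled).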
Java | apache-2.0 | afdba9a455123cb8d6cdc5f42885703ab28340a0 | 0 | microg/AppleWifiNlpBackend | package org.microg.nlp.backend.apple;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.location.Location;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import org.microg.nlp.api.LocationBackendService;
import org.microg.nlp.api.LocationHelper;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
public class BackendService extends LocationBackendService {
private static final String TAG = BackendService.class.getName();
private static final long THIRTY_DAYS = 2592000000L;
private final BroadcastReceiver wifiBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (running) reportUpdate();
}
};
private final LocationRetriever retriever = new LocationRetriever();
private boolean running = false;
private VerifyingWifiLocationCalculator calculator;
private WifiLocationDatabase database;
private WifiManager wifiManager;
private Thread thread;
private Set<String> toRetrieve;
private final Runnable retrieveAction = new Runnable() {
@Override
public void run() {
while (toRetrieve != null && !toRetrieve.isEmpty()) {
if (running) {
Set<String> now = new HashSet<String>();
for (String s : toRetrieve) {
now.add(s);
if (now.size() == 10) break;
}
Log.d(TAG, "Requesting Apple for " + now.size() + " locations");
try {
Collection<Location> response = retriever.retrieveLocations(now);
WifiLocationDatabase.Editor editor = database.edit();
for (Location location : response) {
editor.put(location);
toRetrieve.remove(location.getExtras().getString(LocationRetriever.EXTRA_MAC_ADDRESS));
}
for (String mac : now) {
if (toRetrieve.contains(mac)) {
Bundle extras = new Bundle();
extras.putString(LocationRetriever.EXTRA_MAC_ADDRESS, mac);
editor.put(LocationHelper.create("unknown", System.currentTimeMillis(), extras));
toRetrieve.remove(mac);
}
}
editor.end();
// Forcing update, because new mapping data is available
reportUpdate();
} catch (Exception e) {
Log.w(TAG, e);
}
}
synchronized (thread) {
try {
thread.wait(30000);
} catch (InterruptedException e) {
break;
}
}
}
toRetrieve = null;
thread = null;
}
};
@Override
protected Location update() {
if (wifiManager != null) {
if (wifiManager.isWifiEnabled() || Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 && wifiManager.isScanAlwaysAvailable()) {
wifiManager.startScan();
}
}
return null;
}
private Location calculate() {
if (!running) {
return null;
}
Collection<ScanResult> scanResults = wifiManager.getScanResults();
Set<Location> locations = new HashSet<Location>();
Set<String> unknown = new HashSet<String>();
for (ScanResult result : scanResults) {
if (result.SSID.endsWith("_nomap")) {
// It is industry standard to ignore those APs, so we'll do the same
continue;
}
String mac = LocationRetriever.wellFormedMac(result.BSSID);
Location location = database.get(mac);
if (location != null) {
if ((location.getTime() + THIRTY_DAYS) < System.currentTimeMillis()) {
// Location is old, let's refresh it :)
unknown.add(mac);
}
location.getExtras().putInt(LocationRetriever.EXTRA_SIGNAL_LEVEL, result.level);
if (location.hasAccuracy() && location.getAccuracy() != -1) {
locations.add(location);
}
} else {
unknown.add(mac);
}
}
Log.d(TAG, "Found " + scanResults.size() + " wifis, of whom " + locations.size() + " with location and " + unknown.size() + " unknown.");
if (!unknown.isEmpty()) {
if (toRetrieve == null) {
toRetrieve = unknown;
} else {
toRetrieve.addAll(unknown);
}
}
if (thread == null) {
thread = new Thread(retrieveAction);
thread.start();
}
return calculator.calculate(locations);
}
private void reportUpdate() {
report(calculate());
}
@Override
protected void onOpen() {
database = new WifiLocationDatabase(this);
calculator = new VerifyingWifiLocationCalculator("apple", database);
wifiManager = (WifiManager) getSystemService(WIFI_SERVICE);
registerReceiver(wifiBroadcastReceiver, new IntentFilter(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION));
running = true;
}
@Override
protected void onClose() {
running = false;
unregisterReceiver(wifiBroadcastReceiver);
calculator = null;
database.close();
if (thread != null) {
thread.interrupt();
thread = null;
}
database = null;
wifiManager = null;
}
}
| src/org/microg/nlp/backend/apple/BackendService.java | package org.microg.nlp.backend.apple;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.location.Location;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import org.microg.nlp.api.LocationBackendService;
import org.microg.nlp.api.LocationHelper;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
public class BackendService extends LocationBackendService {
private static final String TAG = BackendService.class.getName();
private static final long THIRTY_DAYS = 2592000000L;
private final BroadcastReceiver wifiBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
reportUpdate();
}
};
private final LocationRetriever retriever = new LocationRetriever();
private VerifyingWifiLocationCalculator calculator;
private WifiLocationDatabase database;
private WifiManager wifiManager;
private Thread thread;
private Set<String> toRetrieve;
private final Runnable retrieveAction = new Runnable() {
@Override
public void run() {
while (toRetrieve != null && !toRetrieve.isEmpty()) {
Set<String> now = new HashSet<String>();
for (String s : toRetrieve) {
now.add(s);
if (now.size() == 10) break;
}
Log.d(TAG, "Requesting Apple for " + now.size() + " locations");
try {
Collection<Location> response = retriever.retrieveLocations(now);
WifiLocationDatabase.Editor editor = database.edit();
for (Location location : response) {
editor.put(location);
toRetrieve.remove(location.getExtras().getString(LocationRetriever.EXTRA_MAC_ADDRESS));
}
for (String mac : now) {
if (toRetrieve.contains(mac)) {
Bundle extras = new Bundle();
extras.putString(LocationRetriever.EXTRA_MAC_ADDRESS, mac);
editor.put(LocationHelper.create("unknown", System.currentTimeMillis(), extras));
toRetrieve.remove(mac);
}
}
editor.end();
// Forcing update, because new mapping data is available
reportUpdate();
} catch (Exception e) {
Log.w(TAG, e);
}
synchronized (thread) {
try {
thread.wait(30000);
} catch (InterruptedException e) {
break;
}
}
}
toRetrieve = null;
thread = null;
}
};
@Override
protected Location update() {
if (wifiManager != null) {
if (wifiManager.isWifiEnabled() || Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2 && wifiManager.isScanAlwaysAvailable()) {
wifiManager.startScan();
}
}
return null;
}
private Location calculate() {
Collection<ScanResult> scanResults = wifiManager.getScanResults();
Set<Location> locations = new HashSet<Location>();
Set<String> unknown = new HashSet<String>();
for (ScanResult result : scanResults) {
if (result.SSID.endsWith("_nomap")) {
// It is industry standard to ignore those APs, so we'll do the same
continue;
}
String mac = LocationRetriever.wellFormedMac(result.BSSID);
Location location = database.get(mac);
if (location != null) {
if ((location.getTime() + THIRTY_DAYS) < System.currentTimeMillis()) {
// Location is old, let's refresh it :)
unknown.add(mac);
}
location.getExtras().putInt(LocationRetriever.EXTRA_SIGNAL_LEVEL, result.level);
if (location.hasAccuracy() && location.getAccuracy() != -1) {
locations.add(location);
}
} else {
unknown.add(mac);
}
}
Log.d(TAG, "Found " + scanResults.size() + " wifis, of whom " + locations.size() + " with location and " + unknown.size() + " unknown.");
if (!unknown.isEmpty()) {
if (toRetrieve == null) {
toRetrieve = unknown;
} else {
toRetrieve.addAll(unknown);
}
}
if (thread == null) {
thread = new Thread(retrieveAction);
thread.start();
}
return calculator.calculate(locations);
}
private void reportUpdate() {
report(calculate());
}
@Override
protected void onOpen() {
database = new WifiLocationDatabase(this);
calculator = new VerifyingWifiLocationCalculator("apple", database);
wifiManager = (WifiManager) getSystemService(WIFI_SERVICE);
registerReceiver(wifiBroadcastReceiver, new IntentFilter(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION));
}
@Override
protected void onClose() {
unregisterReceiver(wifiBroadcastReceiver);
calculator = null;
database.close();
if (thread != null) {
thread.interrupt();
thread = null;
}
database = null;
wifiManager = null;
}
}
| Always check we're running before doing work, possibly fixes random hiccups.
| src/org/microg/nlp/backend/apple/BackendService.java | Always check we're running before doing work, possibly fixes random hiccups. | <ide><path>rc/org/microg/nlp/backend/apple/BackendService.java
<ide> private final BroadcastReceiver wifiBroadcastReceiver = new BroadcastReceiver() {
<ide> @Override
<ide> public void onReceive(Context context, Intent intent) {
<del> reportUpdate();
<add> if (running) reportUpdate();
<ide> }
<ide> };
<ide> private final LocationRetriever retriever = new LocationRetriever();
<add> private boolean running = false;
<ide> private VerifyingWifiLocationCalculator calculator;
<ide> private WifiLocationDatabase database;
<ide> private WifiManager wifiManager;
<ide> @Override
<ide> public void run() {
<ide> while (toRetrieve != null && !toRetrieve.isEmpty()) {
<del> Set<String> now = new HashSet<String>();
<del> for (String s : toRetrieve) {
<del> now.add(s);
<del> if (now.size() == 10) break;
<del> }
<del> Log.d(TAG, "Requesting Apple for " + now.size() + " locations");
<del> try {
<del> Collection<Location> response = retriever.retrieveLocations(now);
<del> WifiLocationDatabase.Editor editor = database.edit();
<del> for (Location location : response) {
<del> editor.put(location);
<del> toRetrieve.remove(location.getExtras().getString(LocationRetriever.EXTRA_MAC_ADDRESS));
<add> if (running) {
<add> Set<String> now = new HashSet<String>();
<add> for (String s : toRetrieve) {
<add> now.add(s);
<add> if (now.size() == 10) break;
<ide> }
<del> for (String mac : now) {
<del> if (toRetrieve.contains(mac)) {
<del> Bundle extras = new Bundle();
<del> extras.putString(LocationRetriever.EXTRA_MAC_ADDRESS, mac);
<del> editor.put(LocationHelper.create("unknown", System.currentTimeMillis(), extras));
<del> toRetrieve.remove(mac);
<add> Log.d(TAG, "Requesting Apple for " + now.size() + " locations");
<add> try {
<add> Collection<Location> response = retriever.retrieveLocations(now);
<add> WifiLocationDatabase.Editor editor = database.edit();
<add> for (Location location : response) {
<add> editor.put(location);
<add> toRetrieve.remove(location.getExtras().getString(LocationRetriever.EXTRA_MAC_ADDRESS));
<ide> }
<add> for (String mac : now) {
<add> if (toRetrieve.contains(mac)) {
<add> Bundle extras = new Bundle();
<add> extras.putString(LocationRetriever.EXTRA_MAC_ADDRESS, mac);
<add> editor.put(LocationHelper.create("unknown", System.currentTimeMillis(), extras));
<add> toRetrieve.remove(mac);
<add> }
<add> }
<add> editor.end();
<add> // Forcing update, because new mapping data is available
<add> reportUpdate();
<add> } catch (Exception e) {
<add> Log.w(TAG, e);
<ide> }
<del> editor.end();
<del> // Forcing update, because new mapping data is available
<del> reportUpdate();
<del> } catch (Exception e) {
<del> Log.w(TAG, e);
<ide> }
<ide> synchronized (thread) {
<ide> try {
<ide> }
<ide>
<ide> private Location calculate() {
<add> if (!running) {
<add> return null;
<add> }
<ide> Collection<ScanResult> scanResults = wifiManager.getScanResults();
<ide> Set<Location> locations = new HashSet<Location>();
<ide> Set<String> unknown = new HashSet<String>();
<ide> calculator = new VerifyingWifiLocationCalculator("apple", database);
<ide> wifiManager = (WifiManager) getSystemService(WIFI_SERVICE);
<ide> registerReceiver(wifiBroadcastReceiver, new IntentFilter(WifiManager.SCAN_RESULTS_AVAILABLE_ACTION));
<add> running = true;
<ide> }
<ide>
<ide> @Override
<ide> protected void onClose() {
<add> running = false;
<ide> unregisterReceiver(wifiBroadcastReceiver);
<ide> calculator = null;
<ide> database.close(); |
|
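The BackendService change above gates every asynchronous entry point (broadcast receiver, retrieval loop, calculate()) on a running flag toggled in onOpen()/onClose(), so callbacks arriving after shutdown become no-ops. A reduced sketch of that lifecycle guard in plain Java (illustrative names, not the microG API; the flag is declared volatile here because it is read and written from different threads, whereas the original uses a plain boolean field):

/** Illustrative only: ignore asynchronous events unless the component is between open() and close(). */
public class RunningGuardSketch {

    private volatile boolean running = false;

    public void open() {
        running = true;
    }

    public void close() {
        running = false;
    }

    /** Called from an asynchronous source, e.g. a broadcast or a worker thread. */
    public void onEvent() {
        if (!running) {
            return; // late or early event: do nothing
        }
        doWork();
    }

    private void doWork() {
        System.out.println("doing work");
    }

    public static void main(String[] args) {
        RunningGuardSketch guard = new RunningGuardSketch();
        guard.onEvent(); // ignored: not opened yet
        guard.open();
        guard.onEvent(); // performs work
        guard.close();
        guard.onEvent(); // ignored: already closed
    }
}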
Java | bsd-2-clause | c7964a97544fa458d64f9cbb97efffce8d4d28f8 | 0 | MichaelZinsmaier/knip-contribution | /*Copyright (C) 2014 Michael Zinsmaier
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
*/
package org.knime.knip.contribution.mz;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.knime.core.node.NodeFactory;
import org.knime.core.node.NodeModel;
import org.knime.core.node.NodeSetFactory;
import org.knime.core.node.config.ConfigRO;
import org.knime.knip.contribution.mz.nodes.annotation.edit.LabelingEditorNodeFactory;
/**
* @author <a href="mailto:[email protected]">Michael Zinsmaier</a>
*/
public class ContributionMZNodeSetFactory implements NodeSetFactory {
private final Map<String, String> m_nodeFactories = new HashMap<String, String>();
/**
* {@inheritDoc}
*/
@Override
public ConfigRO getAdditionalSettings(final String id) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public String getAfterID(final String id) {
return "";
}
/**
* {@inheritDoc}
*/
@Override
public String getCategoryPath(final String id) {
return m_nodeFactories.get(id);
}
/**
* {@inheritDoc}
*/
@SuppressWarnings("unchecked")
@Override
public Class<? extends NodeFactory<? extends NodeModel>> getNodeFactory(
final String id) {
try {
return (Class<? extends NodeFactory<? extends NodeModel>>) Class
.forName(id);
} catch (final ClassNotFoundException e) {
}
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Collection<String> getNodeFactoryIds() {
m_nodeFactories.put(
LabelingEditorNodeFactory.class.getCanonicalName(),
"/community/knip/labeling");
return m_nodeFactories.keySet();
}
}
| src/org/knime/knip/contribution/mz/ContributionMZNodeSetFactory.java | /*Copyright (C) 2014 Michael Zinsmaier
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are
* those of the authors and should not be interpreted as representing official
* policies, either expressed or implied, of any organization.
*/
package org.knime.knip.contribution.mz;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.knime.core.node.NodeFactory;
import org.knime.core.node.NodeModel;
import org.knime.core.node.NodeSetFactory;
import org.knime.core.node.config.ConfigRO;
import org.knime.knip.contribution.mz.nodes.annotation.edit.LabelingEditorNodeFactory;
/**
* @author <a href="mailto:[email protected]">Michael Zinsmaier</a>
*/
public class ContributionMZNodeSetFactory implements NodeSetFactory {
private final Map<String, String> m_nodeFactories = new HashMap<String, String>();
/**
* {@inheritDoc}
*/
@Override
public ConfigRO getAdditionalSettings(final String id) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public String getAfterID(final String id) {
return "/";
}
/**
* {@inheritDoc}
*/
@Override
public String getCategoryPath(final String id) {
return m_nodeFactories.get(id);
}
/**
* {@inheritDoc}
*/
@SuppressWarnings("unchecked")
@Override
public Class<? extends NodeFactory<? extends NodeModel>> getNodeFactory(
final String id) {
try {
return (Class<? extends NodeFactory<? extends NodeModel>>) Class
.forName(id);
} catch (final ClassNotFoundException e) {
}
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Collection<String> getNodeFactoryIds() {
m_nodeFactories.put(
LabelingEditorNodeFactory.class.getCanonicalName(),
"/community/knip/labeling");
return m_nodeFactories.keySet();
}
}
| corrected afterId method in the factory
| src/org/knime/knip/contribution/mz/ContributionMZNodeSetFactory.java | corrected afterId method in the factory | <ide><path>rc/org/knime/knip/contribution/mz/ContributionMZNodeSetFactory.java
<ide> */
<ide> @Override
<ide> public String getAfterID(final String id) {
<del> return "/";
<add> return "";
<ide> }
<ide>
<ide> /** |
|
Java | lgpl-2.1 | f1837bbfcaf0c40425910781d266216ccfac5a71 | 0 | phoenixctms/ctsms,phoenixctms/ctsms,phoenixctms/ctsms,phoenixctms/ctsms | package org.phoenixctms.ctsms.web.model.shared;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletRequest;
import org.phoenixctms.ctsms.enumeration.FileModule;
import org.phoenixctms.ctsms.enumeration.JournalModule;
import org.phoenixctms.ctsms.exception.AuthenticationException;
import org.phoenixctms.ctsms.exception.AuthorisationException;
import org.phoenixctms.ctsms.exception.ServiceException;
import org.phoenixctms.ctsms.js.JsUtil;
import org.phoenixctms.ctsms.util.CommonUtil;
import org.phoenixctms.ctsms.vo.CourseOutVO;
import org.phoenixctms.ctsms.vo.FileContentInVO;
import org.phoenixctms.ctsms.vo.FileInVO;
import org.phoenixctms.ctsms.vo.FileOutVO;
import org.phoenixctms.ctsms.vo.FilePDFVO;
import org.phoenixctms.ctsms.vo.FileStreamInVO;
import org.phoenixctms.ctsms.vo.FileStreamOutVO;
import org.phoenixctms.ctsms.vo.InventoryOutVO;
import org.phoenixctms.ctsms.vo.MassMailOutVO;
import org.phoenixctms.ctsms.vo.PSFVO;
import org.phoenixctms.ctsms.vo.ProbandOutVO;
import org.phoenixctms.ctsms.vo.StaffOutVO;
import org.phoenixctms.ctsms.vo.TrialOutVO;
import org.phoenixctms.ctsms.web.model.DefaultTreeNode;
import org.phoenixctms.ctsms.web.model.FileFolderVO;
import org.phoenixctms.ctsms.web.model.IDVOTreeNode;
import org.phoenixctms.ctsms.web.model.ManagedBeanBase;
import org.phoenixctms.ctsms.web.util.DefaultSettings;
import org.phoenixctms.ctsms.web.util.GetParamNames;
import org.phoenixctms.ctsms.web.util.JSValues;
import org.phoenixctms.ctsms.web.util.MessageCodes;
import org.phoenixctms.ctsms.web.util.Messages;
import org.phoenixctms.ctsms.web.util.SettingCodes;
import org.phoenixctms.ctsms.web.util.Settings;
import org.phoenixctms.ctsms.web.util.Settings.Bundle;
import org.phoenixctms.ctsms.web.util.WebUtil;
import org.primefaces.context.RequestContext;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.event.NodeExpandEvent;
import org.primefaces.event.SelectEvent;
import org.primefaces.model.DefaultStreamedContent;
import org.primefaces.model.StreamedContent;
import org.primefaces.model.TreeNode;
import org.primefaces.model.UploadedFile;
@ManagedBean
@ViewScoped
public class FileBean extends ManagedBeanBase {
public static final String PUBLIC_FILE_PATH = "file";
public static void copyFileOutToIn(FileInVO in, FileOutVO out) {
if (in != null && out != null) {
InventoryOutVO inventoryVO = out.getInventory();
StaffOutVO staffVO = out.getStaff();
CourseOutVO courseVO = out.getCourse();
TrialOutVO trialVO = out.getTrial();
MassMailOutVO massMailVO = out.getMassMail();
ProbandOutVO probandVO = out.getProband();
in.setActive(out.getActive());
in.setPublicFile(out.getPublicFile());
in.setComment(out.getComment());
in.setCourseId(courseVO == null ? null : courseVO.getId());
in.setId(out.getId());
in.setVersion(out.getVersion());
in.setInventoryId(inventoryVO == null ? null : inventoryVO.getId());
in.setLogicalPath(out.getLogicalPath());
in.setModule(out.getModule());
in.setProbandId(probandVO == null ? null : probandVO.getId());
in.setStaffId(staffVO == null ? null : staffVO.getId());
in.setTitle(out.getTitle());
in.setTrialId(trialVO == null ? null : trialVO.getId());
in.setMassMailId(massMailVO == null ? null : massMailVO.getId());
}
}
private static TreeNode createFileRootTreeNode() {
DefaultTreeNode fileRoot = new DefaultTreeNode(new FileFolderVO(), null);
fileRoot.setType(WebUtil.FOLDER_NODE_TYPE);
fileRoot.setExpanded(true);
return fileRoot;
}
public static void initFileDefaultValues(FileInVO in, Long entityId, FileModule module) {
if (in != null) {
in.setActive(Settings.getBoolean(SettingCodes.FILE_ACTIVE_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_ACTIVE_PRESET));
in.setPublicFile(Settings.getBoolean(SettingCodes.FILE_PUBLIC_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_PUBLIC_PRESET));
in.setComment(Messages.getString(MessageCodes.FILE_COMMENT_PRESET));
in.setCourseId(FileModule.COURSE_DOCUMENT.equals(module) ? entityId : null);
in.setId(null);
in.setVersion(null);
in.setInventoryId(FileModule.INVENTORY_DOCUMENT.equals(module) ? entityId : null);
in.setLogicalPath(Settings.getString(SettingCodes.FILE_LOGICAL_PATH_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_LOGICAL_PATH_PRESET));
in.setModule(module);
in.setProbandId(FileModule.PROBAND_DOCUMENT.equals(module) ? entityId : null);
in.setStaffId(FileModule.STAFF_DOCUMENT.equals(module) ? entityId : null);
in.setTitle(Messages.getString(MessageCodes.FILE_TITLE_PRESET));
in.setTrialId(FileModule.TRIAL_DOCUMENT.equals(module) ? entityId : null);
in.setMassMailId(FileModule.MASS_MAIL_DOCUMENT.equals(module) ? entityId : null);
}
}
private static void loadFileFolderTree(TreeNode parent, FileOutVO selected, boolean select, FileModule module, Long id, String parentLogicalPath, Boolean active,
Boolean publicFile, PSFVO psf,
int depth,
boolean selectable) {
if (module != null) {
Collection<String> folders = null;
try {
folders = WebUtil.getServiceLocator().getFileService().getFileFolders(WebUtil.getAuthentication(), module, id, parentLogicalPath, false, active, publicFile, psf);
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
if (folders != null) {
Iterator<String> it = folders.iterator();
while (it.hasNext()) {
String folder = it.next();
DefaultTreeNode folderNode;
if (parent.getData() instanceof FileFolderVO) {
FileFolderVO parentFolder = ((FileFolderVO) parent.getData());
folderNode = new DefaultTreeNode(new FileFolderVO(parentLogicalPath, folder, parentFolder), parent);
parentFolder.incrementFolderCount();
} else {
folderNode = new DefaultTreeNode(new FileFolderVO(parentLogicalPath, folder, null), parent);
}
folderNode.setType(WebUtil.FOLDER_NODE_TYPE);
folderNode.setSelectable(selectable);
if (depth != 0) {
loadFileFolderTree(folderNode, selected, select, module, id, folder, active, publicFile, psf, depth - 1, selectable);
}
}
}
Collection<FileOutVO> fileVOs = null;
try {
fileVOs = WebUtil.getServiceLocator().getFileService().getFiles(WebUtil.getAuthentication(), module, id, parentLogicalPath, false, active, publicFile, psf);
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
if (fileVOs != null) {
Iterator<FileOutVO> it = fileVOs.iterator();
while (it.hasNext()) {
FileOutVO fileVO = it.next();
IDVOTreeNode fileNode = new IDVOTreeNode(fileVO, parent);
if (parent.getData() instanceof FileFolderVO) {
FileFolderVO parentFolderVO = ((FileFolderVO) parent.getData());
parentFolderVO.incrementFileCount();
parentFolderVO.addSize(fileVO.getSize());
parentFolderVO.updateModifiedTimestamp(fileVO.getModifiedTimestamp());
}
if (selected != null && selected.getId() == fileVO.getId()) {
fileNode.setSelected(select);
parent.setExpanded(true);
}
fileNode.setType(WebUtil.FILE_NODE_TYPE);
fileNode.setSelectable(selectable);
}
}
}
}
private FileInVO in;
private FileOutVO out;
private FileOutVO lastUploadedOut;
private Long entityId;
private FileModule module;
private InventoryOutVO inventory;
private StaffOutVO staff;
private CourseOutVO course;
private TrialOutVO trial;
private ProbandOutVO proband;
private MassMailOutVO massMail;
private FileContentInVO contentIn;
private FileStreamInVO streamIn;
private TreeNode fileRoot;
private String allowTypes;
private Boolean streamUploadEnabled;
private Integer uploadSizeLimit;
private Long fileCount;
private String logicalFileSystemStats;
private boolean useFileEncryption;
private String fileNameFilter;
private String fileLogicalPathFilter;
private String titleFilter;
private Boolean activeFilter;
private Boolean publicFilter;
public FileBean() {
super();
useFileEncryption = false;
fileCount = null;
logicalFileSystemStats = null;
streamUploadEnabled = null;
try {
streamUploadEnabled = WebUtil.getServiceLocator().getToolsService().isStreamUploadEnabled();
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
uploadSizeLimit = null;
try {
uploadSizeLimit = WebUtil.getServiceLocator().getToolsService().getUploadSizeLimit();
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
this.fileRoot = createFileRootTreeNode();
}
@Override
public String addAction() {
return addAction(true);
}
private String addAction(boolean init) {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return ERROR_OUTCOME;
}
FileInVO backup = new FileInVO(in);
in.setId(null);
in.setVersion(null);
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
if (streamUploadEnabled) {
out = WebUtil.getServiceLocator().getFileService().addFile(WebUtil.getAuthentication(), in, streamIn);
} else {
out = WebUtil.getServiceLocator().getFileService().addFile(WebUtil.getAuthentication(), in, contentIn);
}
if (init) {
initIn();
initSets(true);
}
addOperationSuccessMessage(MessageCodes.ADD_OPERATION_SUCCESSFUL);
return ADD_OUTCOME;
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
in.copy(backup);
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
in.copy(backup);
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
@Override
protected void appendRequestContextCallbackArgs(boolean operationSuccess) {
if (module != null) {
RequestContext requestContext;
switch (module) {
case INVENTORY_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_INVENTORY_FILE_TAB_TITLE_BASE64, JSValues.AJAX_INVENTORY_FILE_COUNT,
MessageCodes.INVENTORY_FILES_TAB_TITLE, MessageCodes.INVENTORY_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getInventoryId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_INVENTORY_JOURNAL_TAB_TITLE_BASE64,
JSValues.AJAX_INVENTORY_JOURNAL_ENTRY_COUNT, MessageCodes.INVENTORY_JOURNAL_TAB_TITLE, MessageCodes.INVENTORY_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.INVENTORY_JOURNAL, in.getInventoryId()));
}
break;
case STAFF_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_STAFF_FILE_TAB_TITLE_BASE64, JSValues.AJAX_STAFF_FILE_COUNT,
MessageCodes.STAFF_FILES_TAB_TITLE, MessageCodes.STAFF_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getStaffId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_STAFF_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_STAFF_JOURNAL_ENTRY_COUNT,
MessageCodes.STAFF_JOURNAL_TAB_TITLE, MessageCodes.STAFF_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.STAFF_JOURNAL, in.getStaffId()));
}
break;
case COURSE_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_COURSE_FILE_TAB_TITLE_BASE64, JSValues.AJAX_COURSE_FILE_COUNT,
MessageCodes.COURSE_FILES_TAB_TITLE, MessageCodes.COURSE_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getCourseId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_COURSE_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_COURSE_JOURNAL_ENTRY_COUNT,
MessageCodes.COURSE_JOURNAL_TAB_TITLE, MessageCodes.COURSE_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.COURSE_JOURNAL, in.getCourseId()));
}
break;
case TRIAL_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_TRIAL_FILE_TAB_TITLE_BASE64, JSValues.AJAX_TRIAL_FILE_COUNT,
MessageCodes.TRIAL_FILES_TAB_TITLE, MessageCodes.TRIAL_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getTrialId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_TRIAL_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_TRIAL_JOURNAL_ENTRY_COUNT,
MessageCodes.TRIAL_JOURNAL_TAB_TITLE, MessageCodes.TRIAL_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.TRIAL_JOURNAL, in.getTrialId()));
}
break;
case PROBAND_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_PROBAND_FILE_TAB_TITLE_BASE64, JSValues.AJAX_PROBAND_FILE_COUNT,
MessageCodes.PROBAND_FILES_TAB_TITLE, MessageCodes.PROBAND_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getProbandId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_PROBAND_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_PROBAND_JOURNAL_ENTRY_COUNT,
MessageCodes.PROBAND_JOURNAL_TAB_TITLE, MessageCodes.PROBAND_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.PROBAND_JOURNAL, in.getProbandId()));
}
break;
case MASS_MAIL_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_MASS_MAIL_FILE_TAB_TITLE_BASE64, JSValues.AJAX_MASS_MAIL_FILE_COUNT,
MessageCodes.MASS_MAIL_FILES_TAB_TITLE, MessageCodes.MASS_MAIL_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getMassMailId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_MASS_MAIL_JOURNAL_TAB_TITLE_BASE64,
JSValues.AJAX_MASS_MAIL_JOURNAL_ENTRY_COUNT,
MessageCodes.MASS_MAIL_JOURNAL_TAB_TITLE, MessageCodes.MASS_MAIL_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.MASS_MAIL_JOURNAL, in.getMassMailId()));
}
break;
default:
break;
}
}
}
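// Loads a single file for display/editing; files that cannot be decrypted for the current user are rejected. The folder tree is only rebuilt when updateTree is set (it is skipped when the selection originates from the already rendered tree).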
private String changeFileAction(Long id, boolean updateTree) {
out = null;
if (id != null) {
try {
out = WebUtil.getServiceLocator().getFileService().getFile(WebUtil.getAuthentication(), id);
if (!out.isDecrypted()) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.ENCRYPTED_FILE, Long.toString(out.getId()));
out = null;
return ERROR_OUTCOME;
}
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
}
initIn();
initSets(updateTree);
return CHANGE_OUTCOME;
}
@Override
protected String changeAction(Long id) {
return changeFileAction(id, true);
}
private String changeAction(String param, FileModule module) {
out = null;
this.entityId = WebUtil.stringToLong(param);
this.module = module;
fileNameFilter = null;
fileLogicalPathFilter = null;
titleFilter = null;
activeFilter = null;
publicFilter = null;
initIn();
initSets(true);
return CHANGE_OUTCOME;
}
public void changeCourse(String param) {
actionPostProcess(changeCourseAction(param));
}
public String changeCourseAction(String param) {
return changeAction(param, FileModule.COURSE_DOCUMENT);
}
public void changeInventory(String param) {
actionPostProcess(changeInventoryAction(param));
}
public String changeInventoryAction(String param) {
return changeAction(param, FileModule.INVENTORY_DOCUMENT);
}
public void changeMassMail(String param) {
actionPostProcess(changeMassMailAction(param));
}
public String changeMassMailAction(String param) {
return changeAction(param, FileModule.MASS_MAIL_DOCUMENT);
}
public void changeProband(String param) {
actionPostProcess(changeProbandAction(param));
}
public String changeProbandAction(String param) {
return changeAction(param, FileModule.PROBAND_DOCUMENT);
}
public void changeStaff(String param) {
actionPostProcess(changeStaffAction(param));
}
public String changeStaffAction(String param) {
return changeAction(param, FileModule.STAFF_DOCUMENT);
}
public void changeTrial(String param) {
actionPostProcess(changeTrialAction(param));
}
public String changeTrialAction(String param) {
return changeAction(param, FileModule.TRIAL_DOCUMENT);
}
public List<String> completeLogicalPath(String query) {
this.in.setLogicalPath(query);
return WebUtil.completeLogicalPath(module, entityId, query);
}
public List<String> completeLogicalPathFilter(String query) {
fileLogicalPathFilter = query;
return WebUtil.completeLogicalPath(module, entityId, query);
}
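// Builds the filter/sort VO used for the folder tree, bulk delete and PDF aggregation; with file encryption enabled it switches to the hash-based filter properties and id-based sorting, and optionally applies the user's time zone to the filters.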
private PSFVO createSFVO() {
Map<String, String> fileFilters = new HashMap<String, String>();
if (titleFilter != null && titleFilter.length() > 0) {
fileFilters.put(useFileEncryption ? WebUtil.FILE_TITLE_HASH_PSF_PROPERTY_NAME : WebUtil.FILE_TITLE_PSF_PROPERTY_NAME, titleFilter);
}
if (fileNameFilter != null && fileNameFilter.length() > 0) {
fileFilters.put(useFileEncryption ? WebUtil.FILE_NAME_HASH_PSF_PROPERTY_NAME : WebUtil.FILE_NAME_PSF_PROPERTY_NAME, fileNameFilter);
}
if (fileLogicalPathFilter != null && fileLogicalPathFilter.length() > 0) {
fileFilters.put(WebUtil.FILE_LOGICAL_PATH_PSF_PROPERTY_NAME, CommonUtil.fixLogicalPathFolderName(fileLogicalPathFilter));
}
if (activeFilter != null) {
fileFilters.put(WebUtil.FILE_ACTIVE_PSF_PROPERTY_NAME, activeFilter.toString());
}
if (publicFilter != null) {
fileFilters.put(WebUtil.FILE_PUBLIC_PSF_PROPERTY_NAME, publicFilter.toString());
}
PSFVO sf = new PSFVO();
if (!useFileEncryption) {
sf.setSortField(WebUtil.FILE_TITLE_PSF_PROPERTY_NAME);
sf.setSortOrder(true);
} else {
sf.setSortField(WebUtil.FILE_ID_PSF_PROPERTY_NAME);
sf.setSortOrder(true);
}
sf.setFilters(fileFilters);
if (Settings.getBoolean(SettingCodes.FILTER_USER_TIME_ZONE, Bundle.SETTINGS, DefaultSettings.FILTER_USER_TIME_ZONE)) {
sf.setFilterTimeZone(WebUtil.getTimeZone().getID());
}
return sf;
}
@Override
public String deleteAction() {
return deleteAction(in.getId());
}
@Override
public String deleteAction(Long id) {
try {
out = WebUtil.getServiceLocator().getFileService().deleteFile(WebUtil.getAuthentication(), id);
initIn();
initSets(true);
out = null;
addOperationSuccessMessage(MessageCodes.DELETE_OPERATION_SUCCESSFUL);
return DELETE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public final void deleteBulk() {
actionPostProcess(deleteBulkAction());
}
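// Deletes all files matching the current filter set; at least one active filter is required, and a warning is shown if fewer files were deleted than matched the filters.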
public String deleteBulkAction() {
try {
PSFVO sf = createSFVO();
if (sf.getFilters().size() == 0) {
throw new IllegalArgumentException(Messages.getString(MessageCodes.NO_FILE_FILTERS));
}
Collection<FileOutVO> files = WebUtil.getServiceLocator().getFileService()
.deleteFiles(WebUtil.getAuthentication(), module, entityId, null, false, null, null, sf);
long itemsLeft = sf.getRowCount() - files.size();
if (itemsLeft > 0) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.BULK_DELETE_OPERATION_INCOMPLETE, files.size(), sf.getRowCount());
} else {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_INFO, MessageCodes.BULK_DELETE_OPERATION_SUCCESSFUL, sf.getRowCount(), sf.getRowCount());
}
initIn();
initSets(true);
out = null;
return BULK_DELETE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public Boolean getActiveFilter() {
return activeFilter;
}
public StreamedContent getAggregatedPdfFilesStreamedContent() throws Exception {
if (entityId != null) {
try {
FilePDFVO aggregatedPDFFiles = WebUtil.getServiceLocator().getFileService()
.aggregatePDFFiles(WebUtil.getAuthentication(), module, entityId, null, false, null, null, createSFVO());
return new DefaultStreamedContent(new ByteArrayInputStream(aggregatedPDFFiles.getDocumentDatas()), aggregatedPDFFiles.getContentType().getMimeType(),
aggregatedPDFFiles.getFileName());
} catch (AuthenticationException e) {
WebUtil.publishException(e);
throw e;
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
throw e;
}
}
return null;
}
public String getAllowTypes() {
return allowTypes;
}
public String getFileDownloadLinkLabel() {
if (out != null) {
return out.getFileName();
} else {
return Messages.getString(MessageCodes.NO_FILE_LINK_LABEL);
}
}
public String getFileLogicalPathFilter() {
return fileLogicalPathFilter;
}
public String getFileNameFilter() {
return fileNameFilter;
}
public TreeNode getFileRoot() {
return fileRoot;
}
public StreamedContent getFileStreamedContent() throws Exception {
if (out != null) {
try {
FileStreamOutVO streamOut = WebUtil.getServiceLocator().getFileService().getFileStream(WebUtil.getAuthentication(), out.getId());
return new DefaultStreamedContent(streamOut.getStream(), streamOut.getContentType().getMimeType(), streamOut.getFileName());
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
throw e;
} catch (AuthenticationException e) {
WebUtil.publishException(e);
throw e;
}
}
return null;
}
public FileInVO getIn() {
return in;
}
public String getLogicalFileSystemStats() {
return logicalFileSystemStats;
}
@Override
public String getModifiedAnnotation() {
if (out != null) {
return WebUtil.getModifiedAnnotation(out.getVersion(), out.getModifiedUser(), out.getModifiedTimestamp());
} else {
return super.getModifiedAnnotation();
}
}
public String getNodeType(Object data) {
if (data instanceof FileOutVO) {
return WebUtil.FILE_NODE_TYPE;
} else if (data instanceof FileFolderVO) {
return WebUtil.FOLDER_NODE_TYPE;
}
return DefaultTreeNode.DEFAULT_TYPE;
}
public FileOutVO getOut() {
return out;
}
public String getPublicFileSignupUrl() {
if (out != null) {
String publicFileUrlFormat = Settings.getString(SettingCodes.PUBLIC_FILE_SIGNUP_URL, Bundle.SETTINGS, DefaultSettings.PUBLIC_FILE_SIGNUP_URL);
if (!CommonUtil.isEmptyString(publicFileUrlFormat)) {
return MessageFormat.format(publicFileUrlFormat, Long.toString(out.getId()));
}
}
return null;
}
public String getPublicFileUrl() {
if (out != null) {
HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
StringBuilder url = new StringBuilder(WebUtil.getBaseUrl(request));
url.append(request.getContextPath()).append("/").append(PUBLIC_FILE_PATH);
url.append('?').append(GetParamNames.FILE_ID.toString()).append("=").append(Long.toString(out.getId()));
return url.toString();
}
return null;
}
public Boolean getPublicFilter() {
return publicFilter;
}
public TreeNode getSelectedFile() {
return IDVOTreeNode.findNode(fileRoot, this.out);
}
@Override
public String getTitle() {
if (out != null) {
return Messages.getMessage(MessageCodes.FILE_TITLE, Long.toString(out.getId()), out.getTitle());
} else {
return Messages.getString(MessageCodes.CREATE_NEW_FILE);
}
}
public String getTitleFilter() {
return titleFilter;
}
public Integer getUploadSizeLimit() {
return uploadSizeLimit;
}
public void handleActiveFilterChanged() {
updateFileFolderTree(this.module, this.entityId);
}
public void handleFileNameFilterKeyUp() {
updateFileFolderTree(this.module, this.entityId);
}
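// PrimeFaces upload listener: captures the uploaded data as a stream VO or an in-memory content VO; if the title is still empty or equal to the preset, the file name is adopted as title and the file is persisted right away via addAction(false), preserving the chosen logical path for the next upload.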
public void handleFileUpload(FileUploadEvent event) {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return;
}
UploadedFile uploadedFile = event.getFile();
if (streamUploadEnabled) {
contentIn = null;
streamIn = new FileStreamInVO();
streamIn.setFileName(uploadedFile.getFileName());
streamIn.setMimeType(uploadedFile.getContentType());
streamIn.setSize(uploadedFile.getSize());
try {
streamIn.setStream(uploadedFile.getInputstream());
addOperationSuccessMessage(MessageCodes.UPLOAD_OPERATION_SUCCESSFUL);
} catch (IOException e) {
streamIn = null;
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
return;
}
} else {
streamIn = null;
contentIn = new FileContentInVO();
contentIn.setFileName(uploadedFile.getFileName());
contentIn.setMimeType(uploadedFile.getContentType());
contentIn.setDatas(uploadedFile.getContents());
addOperationSuccessMessage(MessageCodes.UPLOAD_OPERATION_SUCCESSFUL);
}
if (!(in.getTitle() != null && in.getTitle().length() > 0) || in.getTitle().equals(Messages.getString(MessageCodes.FILE_TITLE_PRESET))) {
in.setTitle(uploadedFile.getFileName());
if (isCreateable()) {
String path = CommonUtil.fixLogicalPathFolderName(in.getLogicalPath());
if (ADD_OUTCOME.equals(addAction(false))) {
lastUploadedOut = out;
out = null;
initIn();
in.setLogicalPath(path);
}
}
}
}
public void handleFileUploaded() {
updateFileFolderTree(module, entityId, lastUploadedOut, false);
lastUploadedOut = null;
updateLogicalFileSystemStats();
appendRequestContextCallbackArgs(true);
}
public void handleLogicalPathFilterSelect(SelectEvent event) {
fileLogicalPathFilter = (String) event.getObject();
updateFileFolderTree(this.module, this.entityId);
}
public void handleLogicalPathSelect(SelectEvent event) {
in.setLogicalPath((String) event.getObject());
}
public void handlePublicFilterChanged() {
updateFileFolderTree(this.module, this.entityId);
}
public void handleTitleFilterKeyUp() {
updateFileFolderTree(this.module, this.entityId);
}
@PostConstruct
private void init() {
Long id = WebUtil.getLongParamValue(GetParamNames.FILE_ID);
if (id != null) {
this.load(id);
} else {
initIn();
initSets(true);
}
}
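// Copies the loaded file into the input VO and derives module/entity id from whichever reference is set; without a loaded file, the module-specific presets are applied instead. Any pending upload data is discarded.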
private void initIn() {
if (in == null) {
in = new FileInVO();
}
if (out != null) {
copyFileOutToIn(in, out);
if (in.getInventoryId() != null) {
entityId = in.getInventoryId();
module = FileModule.INVENTORY_DOCUMENT;
} else if (in.getStaffId() != null) {
entityId = in.getStaffId();
module = FileModule.STAFF_DOCUMENT;
} else if (in.getCourseId() != null) {
entityId = in.getCourseId();
module = FileModule.COURSE_DOCUMENT;
} else if (in.getTrialId() != null) {
entityId = in.getTrialId();
module = FileModule.TRIAL_DOCUMENT;
} else if (in.getProbandId() != null) {
entityId = in.getProbandId();
module = FileModule.PROBAND_DOCUMENT;
} else if (in.getMassMailId() != null) {
entityId = in.getMassMailId();
module = FileModule.MASS_MAIL_DOCUMENT;
} else {
entityId = null;
module = null;
}
} else {
initFileDefaultValues(in, entityId, module);
}
contentIn = null;
streamIn = null;
}
private void initSets(boolean updateTree) {
inventory = (FileModule.INVENTORY_DOCUMENT.equals(module) ? WebUtil.getInventory(entityId, null, null, null) : null);
staff = (FileModule.STAFF_DOCUMENT.equals(module) ? WebUtil.getStaff(entityId, null, null, null) : null);
course = (FileModule.COURSE_DOCUMENT.equals(module) ? WebUtil.getCourse(entityId, null, null, null) : null);
trial = (FileModule.TRIAL_DOCUMENT.equals(module) ? WebUtil.getTrial(entityId) : null);
proband = (FileModule.PROBAND_DOCUMENT.equals(module) ? WebUtil.getProband(entityId, null, null, null) : null);
massMail = (FileModule.MASS_MAIL_DOCUMENT.equals(module) ? WebUtil.getMassMail(entityId) : null);
useFileEncryption = CommonUtil.getUseFileEncryption(module);
lastUploadedOut = null;
if (updateTree) {
updateFileFolderTree(module, entityId);
updateLogicalFileSystemStats();
}
allowTypes = WebUtil.getAllowedFileExtensionsPattern(module, null);
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
break;
case STAFF_DOCUMENT:
break;
case COURSE_DOCUMENT:
break;
case TRIAL_DOCUMENT:
if (WebUtil.isTrialLocked(trial)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.TRIAL_LOCKED);
}
break;
case PROBAND_DOCUMENT:
if (WebUtil.isProbandLocked(proband)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.PROBAND_LOCKED);
}
break;
case MASS_MAIL_DOCUMENT:
if (WebUtil.isMassMailLocked(massMail)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.MASS_MAIL_LOCKED);
}
break;
default:
break;
}
}
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
}
}
public boolean isBulkRemovable() {
if (module != null && entityId != null && createSFVO().getFilters().size() > 0) {
switch (module) {
case TRIAL_DOCUMENT:
return !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return !WebUtil.isMassMailLocked(massMail);
default:
break;
}
return true;
}
return false;
}
@Override
public boolean isCreateable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return (in.getInventoryId() == null ? false : isFileUploaded());
case STAFF_DOCUMENT:
return (in.getStaffId() == null ? false : isFileUploaded());
case COURSE_DOCUMENT:
return (in.getCourseId() == null ? false : isFileUploaded());
case TRIAL_DOCUMENT:
return (in.getTrialId() == null ? false : isFileUploaded() && !WebUtil.isTrialLocked(trial));
case PROBAND_DOCUMENT:
return (in.getProbandId() == null ? false : isFileUploaded() && !WebUtil.isProbandLocked(proband));
case MASS_MAIL_DOCUMENT:
return (in.getMassMailId() == null ? false : isFileUploaded() && !WebUtil.isMassMailLocked(massMail));
default:
break;
}
}
return false;
}
@Override
public boolean isCreated() {
return out != null;
}
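// A dynamic tree is loaded lazily one folder level at a time (see onNodeExpand); whether the tree is dynamic is a per-module setting.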
public boolean isDynamic() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return Settings.getBoolean(SettingCodes.INVENTORY_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.INVENTORY_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case STAFF_DOCUMENT:
return Settings.getBoolean(SettingCodes.STAFF_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.STAFF_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case COURSE_DOCUMENT:
return Settings.getBoolean(SettingCodes.COURSE_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.COURSE_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case TRIAL_DOCUMENT:
return Settings.getBoolean(SettingCodes.TRIAL_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.TRIAL_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case PROBAND_DOCUMENT:
return Settings.getBoolean(SettingCodes.PROBAND_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.PROBAND_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case MASS_MAIL_DOCUMENT:
return Settings.getBoolean(SettingCodes.MASS_MAIL_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.MASS_MAIL_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
default:
break;
}
}
return true;
}
@Override
public boolean isEditable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return super.isEditable();
case STAFF_DOCUMENT:
return super.isEditable();
case COURSE_DOCUMENT:
return super.isEditable();
case TRIAL_DOCUMENT:
return isCreated() && !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() && !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() && !WebUtil.isMassMailLocked(massMail);
default:
break;
}
}
return super.isEditable();
}
public boolean isFileUpdateEnabled() {
return isEditable() && isFileUploaded();
}
private boolean isFileUploaded() {
if (streamUploadEnabled != null) {
if (streamUploadEnabled) {
if (streamIn != null) {
return true;
}
} else {
if (contentIn != null) {
return true;
}
}
}
return false;
}
public boolean isInputVisible() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return true;
case STAFF_DOCUMENT:
return true;
case COURSE_DOCUMENT:
return true;
case TRIAL_DOCUMENT:
return isCreated() || !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() || !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() || !WebUtil.isMassMailLocked(massMail);
default:
return true;
}
}
return true;
}
@Override
public boolean isRemovable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return super.isRemovable();
case STAFF_DOCUMENT:
return super.isRemovable();
case COURSE_DOCUMENT:
return super.isRemovable();
case TRIAL_DOCUMENT:
return isCreated() && !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() && !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() && !WebUtil.isMassMailLocked(massMail);
default:
break;
}
}
return super.isRemovable();
}
public boolean isUseFileEncryption() {
return useFileEncryption;
}
@Override
public String loadAction() {
return loadAction(in.getId());
}
@Override
public String loadAction(Long id) {
out = null;
try {
out = WebUtil.getServiceLocator().getFileService().getFile(WebUtil.getAuthentication(), id);
if (!out.isDecrypted()) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.ENCRYPTED_FILE, Long.toString(out.getId()));
out = null;
return ERROR_OUTCOME;
}
return LOAD_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
} finally {
initIn();
initSets(true);
}
return ERROR_OUTCOME;
}
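// Lazy-loading callback for dynamic trees: detaches and clears the node's children, then reloads exactly one level below the expanded folder using the current filters.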
public void onNodeExpand(NodeExpandEvent event) {
TreeNode treeNode = event.getTreeNode();
if (treeNode != null) {
Iterator<TreeNode> it = treeNode.getChildren().iterator();
while (it.hasNext()) {
it.next().setParent(null);
}
treeNode.getChildren().clear();
loadFileFolderTree(treeNode, out, true, this.module, this.entityId, ((FileFolderVO) treeNode.getData()).getFolderPath(), null, null, createSFVO(), 1, true);
}
}
@Override
public String resetAction() {
out = null;
initIn();
initSets(true);
return RESET_OUTCOME;
}
public void selectFileByNode() {
Long fileId = WebUtil.getLongParamValue(GetParamNames.FILE_ID);
if (fileId != null) {
actionPostProcess(changeFileAction(fileId, false));
} else {
String logicalPath = JsUtil.decodeBase64(WebUtil.getParamValue(GetParamNames.LOGICAL_PATH));
if (logicalPath != null) {
in.setLogicalPath(logicalPath);
} else {
// since this is an action listener of a command request, we allow deselection explicitly
this.out = null;
this.initIn();
initSets(false);
}
}
}
public void setActiveFilter(Boolean activeFilter) {
this.activeFilter = activeFilter;
}
public void setFileLogicalPathFilter(String fileLogicalPathFilter) {
this.fileLogicalPathFilter = fileLogicalPathFilter;
}
public void setFileNameFilter(String fileNameFilter) {
this.fileNameFilter = fileNameFilter;
}
public void setPublicFilter(Boolean publicFilter) {
this.publicFilter = publicFilter;
}
public void setSelectedFile(TreeNode node) { // treetable only
if (node != null) {
node.setSelected(false);
if (node.getData() instanceof FileOutVO) {
actionPostProcess(changeFileAction(((FileOutVO) node.getData()).getId(), false));
//change(Long.toString(((FileOutVO) node.getData()).getId())); // we load the instance again, to refresh the tree hierarchy depth
} else if (node.getData() instanceof FileFolderVO) {
in.setLogicalPath(((FileFolderVO) node.getData()).getFolderPath());
}
}
}
public void setTitleFilter(String titleFilter) {
this.titleFilter = titleFilter;
}
@Override
public String updateAction() {
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in);
initIn();
initSets(true);
addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
return UPDATE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public void updateFile() {
actionPostProcess(updateFileAction());
}
public String updateFileAction() {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return ERROR_OUTCOME;
}
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
if (streamUploadEnabled) {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in, streamIn);
} else {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in, contentIn);
}
initIn();
initSets(true);
addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
return UPDATE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
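// Rebuilds the folder tree from the root node: one level deep for dynamic trees, the complete hierarchy (depth -1) otherwise.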
private void updateFileFolderTree(FileModule module, Long id) {
updateFileFolderTree(module, id, out, true);
}
private void updateFileFolderTree(FileModule module, Long id, FileOutVO selectedOut, boolean select) {
fileRoot.getChildren().clear();
((FileFolderVO) fileRoot.getData()).resetCounts();
loadFileFolderTree(fileRoot, selectedOut, select, module, id, CommonUtil.LOGICAL_PATH_SEPARATOR, null, null, createSFVO(), isDynamic() ? 1 : -1, true);
}
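// Sums file count and size over the fully loaded tree for the stats label; for dynamic (lazily loaded) trees the totals are unknown, so the stats are cleared.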
private void updateLogicalFileSystemStats() {
if (!isDynamic()) {
long totalSize = 0L;
long totalFileCount = 0L;
if (fileRoot != null) {
Iterator<TreeNode> rootChildrenIt = fileRoot.getChildren().iterator();
while (rootChildrenIt.hasNext()) {
Object data = rootChildrenIt.next().getData();
if (data instanceof FileFolderVO) {
FileFolderVO folderData = (FileFolderVO) data;
totalSize += folderData.getSize();
totalFileCount += folderData.getTotalFileCount();
} else if (data instanceof FileOutVO) {
FileOutVO fileData = (FileOutVO) data;
totalSize += fileData.getSize();
totalFileCount += 1;
}
}
}
fileCount = totalFileCount;
logicalFileSystemStats = Messages.getMessage(MessageCodes.LOGICAL_FILE_SYSTEM_STATS_LABEL, CommonUtil.humanReadableByteCount(totalSize), totalFileCount);
} else {
fileCount = null;
logicalFileSystemStats = null;
}
}
}
| web/src/main/java/org/phoenixctms/ctsms/web/model/shared/FileBean.java | package org.phoenixctms.ctsms.web.model.shared;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletRequest;
import org.phoenixctms.ctsms.enumeration.FileModule;
import org.phoenixctms.ctsms.enumeration.JournalModule;
import org.phoenixctms.ctsms.exception.AuthenticationException;
import org.phoenixctms.ctsms.exception.AuthorisationException;
import org.phoenixctms.ctsms.exception.ServiceException;
import org.phoenixctms.ctsms.js.JsUtil;
import org.phoenixctms.ctsms.util.CommonUtil;
import org.phoenixctms.ctsms.vo.CourseOutVO;
import org.phoenixctms.ctsms.vo.FileContentInVO;
import org.phoenixctms.ctsms.vo.FileInVO;
import org.phoenixctms.ctsms.vo.FileOutVO;
import org.phoenixctms.ctsms.vo.FilePDFVO;
import org.phoenixctms.ctsms.vo.FileStreamInVO;
import org.phoenixctms.ctsms.vo.FileStreamOutVO;
import org.phoenixctms.ctsms.vo.InventoryOutVO;
import org.phoenixctms.ctsms.vo.MassMailOutVO;
import org.phoenixctms.ctsms.vo.PSFVO;
import org.phoenixctms.ctsms.vo.ProbandOutVO;
import org.phoenixctms.ctsms.vo.StaffOutVO;
import org.phoenixctms.ctsms.vo.TrialOutVO;
import org.phoenixctms.ctsms.web.model.DefaultTreeNode;
import org.phoenixctms.ctsms.web.model.FileFolderVO;
import org.phoenixctms.ctsms.web.model.IDVOTreeNode;
import org.phoenixctms.ctsms.web.model.ManagedBeanBase;
import org.phoenixctms.ctsms.web.util.DefaultSettings;
import org.phoenixctms.ctsms.web.util.GetParamNames;
import org.phoenixctms.ctsms.web.util.JSValues;
import org.phoenixctms.ctsms.web.util.MessageCodes;
import org.phoenixctms.ctsms.web.util.Messages;
import org.phoenixctms.ctsms.web.util.SettingCodes;
import org.phoenixctms.ctsms.web.util.Settings;
import org.phoenixctms.ctsms.web.util.Settings.Bundle;
import org.phoenixctms.ctsms.web.util.WebUtil;
import org.primefaces.context.RequestContext;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.event.NodeExpandEvent;
import org.primefaces.event.SelectEvent;
import org.primefaces.model.DefaultStreamedContent;
import org.primefaces.model.StreamedContent;
import org.primefaces.model.TreeNode;
import org.primefaces.model.UploadedFile;
@ManagedBean
@ViewScoped
public class FileBean extends ManagedBeanBase {
public static final String PUBLIC_FILE_PATH = "file";
public static void copyFileOutToIn(FileInVO in, FileOutVO out) {
if (in != null && out != null) {
InventoryOutVO inventoryVO = out.getInventory();
StaffOutVO staffVO = out.getStaff();
CourseOutVO courseVO = out.getCourse();
TrialOutVO trialVO = out.getTrial();
MassMailOutVO massMailVO = out.getMassMail();
ProbandOutVO probandVO = out.getProband();
in.setActive(out.getActive());
in.setPublicFile(out.getPublicFile());
in.setComment(out.getComment());
in.setCourseId(courseVO == null ? null : courseVO.getId());
in.setId(out.getId());
in.setVersion(out.getVersion());
in.setInventoryId(inventoryVO == null ? null : inventoryVO.getId());
in.setLogicalPath(out.getLogicalPath());
in.setModule(out.getModule());
in.setProbandId(probandVO == null ? null : probandVO.getId());
in.setStaffId(staffVO == null ? null : staffVO.getId());
in.setTitle(out.getTitle());
in.setTrialId(trialVO == null ? null : trialVO.getId());
in.setMassMailId(massMailVO == null ? null : massMailVO.getId());
}
}
private static TreeNode createFileRootTreeNode() {
DefaultTreeNode fileRoot = new DefaultTreeNode(new FileFolderVO(), null);
fileRoot.setType(WebUtil.FOLDER_NODE_TYPE);
fileRoot.setExpanded(true);
return fileRoot;
}
public static void initFileDefaultValues(FileInVO in, Long entityId, FileModule module) {
if (in != null) {
in.setActive(Settings.getBoolean(SettingCodes.FILE_ACTIVE_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_ACTIVE_PRESET));
in.setPublicFile(Settings.getBoolean(SettingCodes.FILE_PUBLIC_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_PUBLIC_PRESET));
in.setComment(Messages.getString(MessageCodes.FILE_COMMENT_PRESET));
in.setCourseId(FileModule.COURSE_DOCUMENT.equals(module) ? entityId : null);
in.setId(null);
in.setVersion(null);
in.setInventoryId(FileModule.INVENTORY_DOCUMENT.equals(module) ? entityId : null);
in.setLogicalPath(Settings.getString(SettingCodes.FILE_LOGICAL_PATH_PRESET, Bundle.SETTINGS, DefaultSettings.FILE_LOGICAL_PATH_PRESET));
in.setModule(module);
in.setProbandId(FileModule.PROBAND_DOCUMENT.equals(module) ? entityId : null);
in.setStaffId(FileModule.STAFF_DOCUMENT.equals(module) ? entityId : null);
in.setTitle(Messages.getString(MessageCodes.FILE_TITLE_PRESET));
in.setTrialId(FileModule.TRIAL_DOCUMENT.equals(module) ? entityId : null);
in.setMassMailId(FileModule.MASS_MAIL_DOCUMENT.equals(module) ? entityId : null);
}
}
private static void loadFileFolderTree(TreeNode parent, FileOutVO selected, boolean select, FileModule module, Long id, String parentLogicalPath, Boolean active,
Boolean publicFile, PSFVO psf,
int depth,
boolean selectable) {
if (module != null) {
Collection<String> folders = null;
try {
folders = WebUtil.getServiceLocator().getFileService().getFileFolders(WebUtil.getAuthentication(), module, id, parentLogicalPath, false, active, publicFile, psf);
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
if (folders != null) {
Iterator<String> it = folders.iterator();
while (it.hasNext()) {
String folder = it.next();
DefaultTreeNode folderNode;
if (parent.getData() instanceof FileFolderVO) {
FileFolderVO parentFolder = ((FileFolderVO) parent.getData());
folderNode = new DefaultTreeNode(new FileFolderVO(parentLogicalPath, folder, parentFolder), parent);
parentFolder.incrementFolderCount();
} else {
folderNode = new DefaultTreeNode(new FileFolderVO(parentLogicalPath, folder, null), parent);
}
folderNode.setType(WebUtil.FOLDER_NODE_TYPE);
folderNode.setSelectable(selectable);
if (depth != 0) {
loadFileFolderTree(folderNode, selected, select, module, id, folder, active, publicFile, psf, depth - 1, selectable);
}
}
}
Collection<FileOutVO> fileVOs = null;
try {
fileVOs = WebUtil.getServiceLocator().getFileService().getFiles(WebUtil.getAuthentication(), module, id, parentLogicalPath, false, active, publicFile, psf);
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
if (fileVOs != null) {
Iterator<FileOutVO> it = fileVOs.iterator();
while (it.hasNext()) {
FileOutVO fileVO = it.next();
IDVOTreeNode fileNode = new IDVOTreeNode(fileVO, parent);
if (parent.getData() instanceof FileFolderVO) {
FileFolderVO parentFolderVO = ((FileFolderVO) parent.getData());
parentFolderVO.incrementFileCount();
parentFolderVO.addSize(fileVO.getSize());
parentFolderVO.updateModifiedTimestamp(fileVO.getModifiedTimestamp());
}
if (selected != null && selected.getId() == fileVO.getId()) {
fileNode.setSelected(select);
parent.setExpanded(true);
}
fileNode.setType(WebUtil.FILE_NODE_TYPE);
fileNode.setSelectable(selectable);
}
}
}
}
private FileInVO in;
private FileOutVO out;
private FileOutVO lastUploadedOut;
private Long entityId;
private FileModule module;
private InventoryOutVO inventory;
private StaffOutVO staff;
private CourseOutVO course;
private TrialOutVO trial;
private ProbandOutVO proband;
private MassMailOutVO massMail;
private FileContentInVO contentIn;
private FileStreamInVO streamIn;
private TreeNode fileRoot;
private String allowTypes;
private Boolean streamUploadEnabled;
private Integer uploadSizeLimit;
private Long fileCount;
private String logicalFileSystemStats;
private boolean useFileEncryption;
private String fileNameFilter;
private String fileLogicalPathFilter;
private String titleFilter;
private Boolean activeFilter;
private Boolean publicFilter;
public FileBean() {
super();
useFileEncryption = false;
fileCount = null;
logicalFileSystemStats = null;
streamUploadEnabled = null;
try {
streamUploadEnabled = WebUtil.getServiceLocator().getToolsService().isStreamUploadEnabled();
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
uploadSizeLimit = null;
try {
uploadSizeLimit = WebUtil.getServiceLocator().getToolsService().getUploadSizeLimit();
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
this.fileRoot = createFileRootTreeNode();
}
@Override
public String addAction() {
return addAction(true);
}
private String addAction(boolean init) {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return ERROR_OUTCOME;
}
FileInVO backup = new FileInVO(in);
in.setId(null);
in.setVersion(null);
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
if (streamUploadEnabled) {
out = WebUtil.getServiceLocator().getFileService().addFile(WebUtil.getAuthentication(), in, streamIn);
} else {
out = WebUtil.getServiceLocator().getFileService().addFile(WebUtil.getAuthentication(), in, contentIn);
}
if (init) {
initIn();
initSets();
}
addOperationSuccessMessage(MessageCodes.ADD_OPERATION_SUCCESSFUL);
return ADD_OUTCOME;
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
in.copy(backup);
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
in.copy(backup);
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
@Override
protected void appendRequestContextCallbackArgs(boolean operationSuccess) {
if (module != null) {
RequestContext requestContext;
switch (module) {
case INVENTORY_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_INVENTORY_FILE_TAB_TITLE_BASE64, JSValues.AJAX_INVENTORY_FILE_COUNT,
MessageCodes.INVENTORY_FILES_TAB_TITLE, MessageCodes.INVENTORY_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getInventoryId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_INVENTORY_JOURNAL_TAB_TITLE_BASE64,
JSValues.AJAX_INVENTORY_JOURNAL_ENTRY_COUNT, MessageCodes.INVENTORY_JOURNAL_TAB_TITLE, MessageCodes.INVENTORY_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.INVENTORY_JOURNAL, in.getInventoryId()));
}
break;
case STAFF_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_STAFF_FILE_TAB_TITLE_BASE64, JSValues.AJAX_STAFF_FILE_COUNT,
MessageCodes.STAFF_FILES_TAB_TITLE, MessageCodes.STAFF_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getStaffId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_STAFF_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_STAFF_JOURNAL_ENTRY_COUNT,
MessageCodes.STAFF_JOURNAL_TAB_TITLE, MessageCodes.STAFF_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.STAFF_JOURNAL, in.getStaffId()));
}
break;
case COURSE_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_COURSE_FILE_TAB_TITLE_BASE64, JSValues.AJAX_COURSE_FILE_COUNT,
MessageCodes.COURSE_FILES_TAB_TITLE, MessageCodes.COURSE_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getCourseId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_COURSE_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_COURSE_JOURNAL_ENTRY_COUNT,
MessageCodes.COURSE_JOURNAL_TAB_TITLE, MessageCodes.COURSE_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.COURSE_JOURNAL, in.getCourseId()));
}
break;
case TRIAL_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_TRIAL_FILE_TAB_TITLE_BASE64, JSValues.AJAX_TRIAL_FILE_COUNT,
MessageCodes.TRIAL_FILES_TAB_TITLE, MessageCodes.TRIAL_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getTrialId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_TRIAL_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_TRIAL_JOURNAL_ENTRY_COUNT,
MessageCodes.TRIAL_JOURNAL_TAB_TITLE, MessageCodes.TRIAL_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.TRIAL_JOURNAL, in.getTrialId()));
}
break;
case PROBAND_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_PROBAND_FILE_TAB_TITLE_BASE64, JSValues.AJAX_PROBAND_FILE_COUNT,
MessageCodes.PROBAND_FILES_TAB_TITLE, MessageCodes.PROBAND_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getProbandId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_PROBAND_JOURNAL_TAB_TITLE_BASE64, JSValues.AJAX_PROBAND_JOURNAL_ENTRY_COUNT,
MessageCodes.PROBAND_JOURNAL_TAB_TITLE, MessageCodes.PROBAND_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.PROBAND_JOURNAL, in.getProbandId()));
}
break;
case MASS_MAIL_DOCUMENT:
requestContext = WebUtil.appendRequestContextCallbackTabTitleArgs(null, JSValues.AJAX_MASS_MAIL_FILE_TAB_TITLE_BASE64, JSValues.AJAX_MASS_MAIL_FILE_COUNT,
MessageCodes.MASS_MAIL_FILES_TAB_TITLE, MessageCodes.MASS_MAIL_FILES_TAB_TITLE_WITH_COUNT, fileCount);
if (operationSuccess && in.getMassMailId() != null) {
WebUtil.appendRequestContextCallbackTabTitleArgs(requestContext, JSValues.AJAX_MASS_MAIL_JOURNAL_TAB_TITLE_BASE64,
JSValues.AJAX_MASS_MAIL_JOURNAL_ENTRY_COUNT,
MessageCodes.MASS_MAIL_JOURNAL_TAB_TITLE, MessageCodes.MASS_MAIL_JOURNAL_TAB_TITLE_WITH_COUNT,
WebUtil.getJournalCount(JournalModule.MASS_MAIL_JOURNAL, in.getMassMailId()));
}
break;
default:
break;
}
}
}
@Override
protected String changeAction(Long id) {
out = null;
if (id != null) {
try {
out = WebUtil.getServiceLocator().getFileService().getFile(WebUtil.getAuthentication(), id);
if (!out.isDecrypted()) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.ENCRYPTED_FILE, Long.toString(out.getId()));
out = null;
return ERROR_OUTCOME;
}
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
} catch (AuthenticationException e) {
WebUtil.publishException(e);
}
}
initIn();
initSets();
return CHANGE_OUTCOME;
}
private String changeAction(String param, FileModule module) {
out = null;
this.entityId = WebUtil.stringToLong(param);
this.module = module;
fileNameFilter = null;
fileLogicalPathFilter = null;
titleFilter = null;
activeFilter = null;
publicFilter = null;
initIn();
initSets();
return CHANGE_OUTCOME;
}
public void changeCourse(String param) {
actionPostProcess(changeCourseAction(param));
}
public String changeCourseAction(String param) {
return changeAction(param, FileModule.COURSE_DOCUMENT);
}
public void changeInventory(String param) {
actionPostProcess(changeInventoryAction(param));
}
public String changeInventoryAction(String param) {
return changeAction(param, FileModule.INVENTORY_DOCUMENT);
}
public void changeMassMail(String param) {
actionPostProcess(changeMassMailAction(param));
}
public String changeMassMailAction(String param) {
return changeAction(param, FileModule.MASS_MAIL_DOCUMENT);
}
public void changeProband(String param) {
actionPostProcess(changeProbandAction(param));
}
public String changeProbandAction(String param) {
return changeAction(param, FileModule.PROBAND_DOCUMENT);
}
public void changeStaff(String param) {
actionPostProcess(changeStaffAction(param));
}
public String changeStaffAction(String param) {
return changeAction(param, FileModule.STAFF_DOCUMENT);
}
public void changeTrial(String param) {
actionPostProcess(changeTrialAction(param));
}
public String changeTrialAction(String param) {
return changeAction(param, FileModule.TRIAL_DOCUMENT);
}
public List<String> completeLogicalPath(String query) {
this.in.setLogicalPath(query);
return WebUtil.completeLogicalPath(module, entityId, query);
}
public List<String> completeLogicalPathFilter(String query) {
fileLogicalPathFilter = query;
return WebUtil.completeLogicalPath(module, entityId, query);
}
private PSFVO createSFVO() {
Map<String, String> fileFilters = new HashMap<String, String>();
if (titleFilter != null && titleFilter.length() > 0) {
fileFilters.put(useFileEncryption ? WebUtil.FILE_TITLE_HASH_PSF_PROPERTY_NAME : WebUtil.FILE_TITLE_PSF_PROPERTY_NAME, titleFilter);
}
if (fileNameFilter != null && fileNameFilter.length() > 0) {
fileFilters.put(useFileEncryption ? WebUtil.FILE_NAME_HASH_PSF_PROPERTY_NAME : WebUtil.FILE_NAME_PSF_PROPERTY_NAME, fileNameFilter);
}
if (fileLogicalPathFilter != null && fileLogicalPathFilter.length() > 0) {
fileFilters.put(WebUtil.FILE_LOGICAL_PATH_PSF_PROPERTY_NAME, CommonUtil.fixLogicalPathFolderName(fileLogicalPathFilter));
}
if (activeFilter != null) {
fileFilters.put(WebUtil.FILE_ACTIVE_PSF_PROPERTY_NAME, activeFilter.toString());
}
if (publicFilter != null) {
fileFilters.put(WebUtil.FILE_PUBLIC_PSF_PROPERTY_NAME, publicFilter.toString());
}
PSFVO sf = new PSFVO();
if (!useFileEncryption) {
sf.setSortField(WebUtil.FILE_TITLE_PSF_PROPERTY_NAME);
sf.setSortOrder(true);
} else {
sf.setSortField(WebUtil.FILE_ID_PSF_PROPERTY_NAME);
sf.setSortOrder(true);
}
sf.setFilters(fileFilters);
if (Settings.getBoolean(SettingCodes.FILTER_USER_TIME_ZONE, Bundle.SETTINGS, DefaultSettings.FILTER_USER_TIME_ZONE)) {
sf.setFilterTimeZone(WebUtil.getTimeZone().getID());
}
return sf;
}
@Override
public String deleteAction() {
return deleteAction(in.getId());
}
@Override
public String deleteAction(Long id) {
try {
out = WebUtil.getServiceLocator().getFileService().deleteFile(WebUtil.getAuthentication(), id);
initIn();
initSets();
out = null;
addOperationSuccessMessage(MessageCodes.DELETE_OPERATION_SUCCESSFUL);
return DELETE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public final void deleteBulk() {
actionPostProcess(deleteBulkAction());
}
public String deleteBulkAction() {
try {
PSFVO sf = createSFVO();
if (sf.getFilters().size() == 0) {
throw new IllegalArgumentException(Messages.getString(MessageCodes.NO_FILE_FILTERS));
}
Collection<FileOutVO> files = WebUtil.getServiceLocator().getFileService()
.deleteFiles(WebUtil.getAuthentication(), module, entityId, null, false, null, null, sf);
long itemsLeft = sf.getRowCount() - files.size();
if (itemsLeft > 0) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.BULK_DELETE_OPERATION_INCOMPLETE, files.size(), sf.getRowCount());
} else {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_INFO, MessageCodes.BULK_DELETE_OPERATION_SUCCESSFUL, sf.getRowCount(), sf.getRowCount());
}
initIn();
initSets();
out = null;
return BULK_DELETE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public Boolean getActiveFilter() {
return activeFilter;
}
public StreamedContent getAggregatedPdfFilesStreamedContent() throws Exception {
if (entityId != null) {
try {
FilePDFVO aggregatedPDFFiles = WebUtil.getServiceLocator().getFileService()
.aggregatePDFFiles(WebUtil.getAuthentication(), module, entityId, null, false, null, null, createSFVO());
return new DefaultStreamedContent(new ByteArrayInputStream(aggregatedPDFFiles.getDocumentDatas()), aggregatedPDFFiles.getContentType().getMimeType(),
aggregatedPDFFiles.getFileName());
} catch (AuthenticationException e) {
WebUtil.publishException(e);
throw e;
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
throw e;
}
}
return null;
}
public String getAllowTypes() {
return allowTypes;
}
public String getFileDownloadLinkLabel() {
if (out != null) {
return out.getFileName();
} else {
return Messages.getString(MessageCodes.NO_FILE_LINK_LABEL);
}
}
public String getFileLogicalPathFilter() {
return fileLogicalPathFilter;
}
public String getFileNameFilter() {
return fileNameFilter;
}
public TreeNode getFileRoot() {
return fileRoot;
}
public StreamedContent getFileStreamedContent() throws Exception {
if (out != null) {
try {
FileStreamOutVO streamOut = WebUtil.getServiceLocator().getFileService().getFileStream(WebUtil.getAuthentication(), out.getId());
return new DefaultStreamedContent(streamOut.getStream(), streamOut.getContentType().getMimeType(), streamOut.getFileName());
} catch (AuthorisationException | ServiceException | IllegalArgumentException e) {
throw e;
} catch (AuthenticationException e) {
WebUtil.publishException(e);
throw e;
}
}
return null;
}
public FileInVO getIn() {
return in;
}
public String getLogicalFileSystemStats() {
return logicalFileSystemStats;
}
@Override
public String getModifiedAnnotation() {
if (out != null) {
return WebUtil.getModifiedAnnotation(out.getVersion(), out.getModifiedUser(), out.getModifiedTimestamp());
} else {
return super.getModifiedAnnotation();
}
}
public String getNodeType(Object data) {
if (data instanceof FileOutVO) {
return WebUtil.FILE_NODE_TYPE;
} else if (data instanceof FileFolderVO) {
return WebUtil.FOLDER_NODE_TYPE;
}
return DefaultTreeNode.DEFAULT_TYPE;
}
public FileOutVO getOut() {
return out;
}
public String getPublicFileSignupUrl() {
if (out != null) {
String publicFileUrlFormat = Settings.getString(SettingCodes.PUBLIC_FILE_SIGNUP_URL, Bundle.SETTINGS, DefaultSettings.PUBLIC_FILE_SIGNUP_URL);
if (!CommonUtil.isEmptyString(publicFileUrlFormat)) {
return MessageFormat.format(publicFileUrlFormat, Long.toString(out.getId()));
}
}
return null;
}
public String getPublicFileUrl() {
if (out != null) {
HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
StringBuffer url = new StringBuffer(WebUtil.getBaseUrl(request));
url.append(request.getContextPath()).append("/").append(PUBLIC_FILE_PATH);
url.append('?').append(GetParamNames.FILE_ID.toString()).append("=").append(Long.toString(out.getId()));
return url.toString();
}
return null;
}
public Boolean getPublicFilter() {
return publicFilter;
}
public TreeNode getSelectedFile() {
return IDVOTreeNode.findNode(fileRoot, this.out);
}
@Override
public String getTitle() {
if (out != null) {
return Messages.getMessage(MessageCodes.FILE_TITLE, Long.toString(out.getId()), out.getTitle());
} else {
return Messages.getString(MessageCodes.CREATE_NEW_FILE);
}
}
public String getTitleFilter() {
return titleFilter;
}
public Integer getUploadSizeLimit() {
return uploadSizeLimit;
}
public void handleActiveFilterChanged() {
updateFileFolderTree(this.module, this.entityId);
}
public void handleFileNameFilterKeyUp() {
updateFileFolderTree(this.module, this.entityId);
}
public void handleFileUpload(FileUploadEvent event) {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return;
}
UploadedFile uploadedFile = event.getFile();
if (streamUploadEnabled) {
contentIn = null;
streamIn = new FileStreamInVO();
streamIn.setFileName(uploadedFile.getFileName());
streamIn.setMimeType(uploadedFile.getContentType());
streamIn.setSize(uploadedFile.getSize());
try {
streamIn.setStream(uploadedFile.getInputstream());
addOperationSuccessMessage(MessageCodes.UPLOAD_OPERATION_SUCCESSFUL);
} catch (IOException e) {
streamIn = null;
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
return;
}
} else {
streamIn = null;
contentIn = new FileContentInVO();
contentIn.setFileName(uploadedFile.getFileName());
contentIn.setMimeType(uploadedFile.getContentType());
contentIn.setDatas(uploadedFile.getContents());
addOperationSuccessMessage(MessageCodes.UPLOAD_OPERATION_SUCCESSFUL);
}
if (!(in.getTitle() != null && in.getTitle().length() > 0) || in.getTitle().equals(Messages.getString(MessageCodes.FILE_TITLE_PRESET))) {
in.setTitle(uploadedFile.getFileName());
if (isCreateable()) {
String path = CommonUtil.fixLogicalPathFolderName(in.getLogicalPath());
if (ADD_OUTCOME.equals(addAction(false))) {
lastUploadedOut = out;
out = null;
initIn();
in.setLogicalPath(path);
}
}
}
}
public void handleFileUploaded() {
updateFileFolderTree(module, entityId, lastUploadedOut, false);
lastUploadedOut = null;
updateLogicalFileSystemStats();
appendRequestContextCallbackArgs(true);
}
public void handleLogicalPathFilterSelect(SelectEvent event) {
fileLogicalPathFilter = (String) event.getObject();
updateFileFolderTree(this.module, this.entityId);
}
public void handleLogicalPathSelect(SelectEvent event) {
in.setLogicalPath((String) event.getObject());
}
public void handlePublicFilterChanged() {
updateFileFolderTree(this.module, this.entityId);
}
public void handleTitleFilterKeyUp() {
updateFileFolderTree(this.module, this.entityId);
}
@PostConstruct
private void init() {
Long id = WebUtil.getLongParamValue(GetParamNames.FILE_ID);
if (id != null) {
this.load(id);
} else {
initIn();
initSets();
}
}
private void initIn() {
if (in == null) {
in = new FileInVO();
}
if (out != null) {
copyFileOutToIn(in, out);
if (in.getInventoryId() != null) {
entityId = in.getInventoryId();
module = FileModule.INVENTORY_DOCUMENT;
} else if (in.getStaffId() != null) {
entityId = in.getStaffId();
module = FileModule.STAFF_DOCUMENT;
} else if (in.getCourseId() != null) {
entityId = in.getCourseId();
module = FileModule.COURSE_DOCUMENT;
} else if (in.getTrialId() != null) {
entityId = in.getTrialId();
module = FileModule.TRIAL_DOCUMENT;
} else if (in.getProbandId() != null) {
entityId = in.getProbandId();
module = FileModule.PROBAND_DOCUMENT;
} else if (in.getMassMailId() != null) {
entityId = in.getMassMailId();
module = FileModule.MASS_MAIL_DOCUMENT;
} else {
entityId = null;
module = null;
}
} else {
initFileDefaultValues(in, entityId, module);
}
contentIn = null;
streamIn = null;
}
private void initSets() {
inventory = (FileModule.INVENTORY_DOCUMENT.equals(module) ? WebUtil.getInventory(entityId, null, null, null) : null);
staff = (FileModule.STAFF_DOCUMENT.equals(module) ? WebUtil.getStaff(entityId, null, null, null) : null);
course = (FileModule.COURSE_DOCUMENT.equals(module) ? WebUtil.getCourse(entityId, null, null, null) : null);
trial = (FileModule.TRIAL_DOCUMENT.equals(module) ? WebUtil.getTrial(entityId) : null);
proband = (FileModule.PROBAND_DOCUMENT.equals(module) ? WebUtil.getProband(entityId, null, null, null) : null);
massMail = (FileModule.MASS_MAIL_DOCUMENT.equals(module) ? WebUtil.getMassMail(entityId) : null);
useFileEncryption = CommonUtil.getUseFileEncryption(module);
lastUploadedOut = null;
updateFileFolderTree(module, entityId);
updateLogicalFileSystemStats();
allowTypes = WebUtil.getAllowedFileExtensionsPattern(module, null);
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
break;
case STAFF_DOCUMENT:
break;
case COURSE_DOCUMENT:
break;
case TRIAL_DOCUMENT:
if (WebUtil.isTrialLocked(trial)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.TRIAL_LOCKED);
}
break;
case PROBAND_DOCUMENT:
if (WebUtil.isProbandLocked(proband)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.PROBAND_LOCKED);
}
break;
case MASS_MAIL_DOCUMENT:
if (WebUtil.isMassMailLocked(massMail)) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_WARN, MessageCodes.MASS_MAIL_LOCKED);
}
break;
default:
break;
}
}
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
}
}
public boolean isBulkRemovable() {
if (module != null && entityId != null && createSFVO().getFilters().size() > 0) {
switch (module) {
case TRIAL_DOCUMENT:
return !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return !WebUtil.isMassMailLocked(massMail);
default:
break;
}
return true;
}
return false;
}
@Override
public boolean isCreateable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return (in.getInventoryId() == null ? false : isFileUploaded());
case STAFF_DOCUMENT:
return (in.getStaffId() == null ? false : isFileUploaded());
case COURSE_DOCUMENT:
return (in.getCourseId() == null ? false : isFileUploaded());
case TRIAL_DOCUMENT:
return (in.getTrialId() == null ? false : isFileUploaded() && !WebUtil.isTrialLocked(trial));
case PROBAND_DOCUMENT:
return (in.getProbandId() == null ? false : isFileUploaded() && !WebUtil.isProbandLocked(proband));
case MASS_MAIL_DOCUMENT:
return (in.getMassMailId() == null ? false : isFileUploaded() && !WebUtil.isMassMailLocked(massMail));
default:
break;
}
}
return false;
}
@Override
public boolean isCreated() {
return out != null;
}
public boolean isDynamic() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return Settings.getBoolean(SettingCodes.INVENTORY_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.INVENTORY_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case STAFF_DOCUMENT:
return Settings.getBoolean(SettingCodes.STAFF_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.STAFF_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case COURSE_DOCUMENT:
return Settings.getBoolean(SettingCodes.COURSE_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.COURSE_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case TRIAL_DOCUMENT:
return Settings.getBoolean(SettingCodes.TRIAL_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.TRIAL_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case PROBAND_DOCUMENT:
return Settings.getBoolean(SettingCodes.PROBAND_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.PROBAND_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
case MASS_MAIL_DOCUMENT:
return Settings.getBoolean(SettingCodes.MASS_MAIL_FILE_FOLDER_TREE_DYNAMIC, Bundle.SETTINGS, DefaultSettings.MASS_MAIL_FILE_FOLDER_TREE_DYNAMIC_DEFAULT);
default:
break;
}
}
return true;
}
@Override
public boolean isEditable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return super.isEditable();
case STAFF_DOCUMENT:
return super.isEditable();
case COURSE_DOCUMENT:
return super.isEditable();
case TRIAL_DOCUMENT:
return isCreated() && !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() && !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() && !WebUtil.isMassMailLocked(massMail);
default:
break;
}
}
return super.isEditable();
}
public boolean isFileUpdateEnabled() {
return isEditable() && isFileUploaded();
}
private boolean isFileUploaded() {
if (streamUploadEnabled != null) {
if (streamUploadEnabled) {
if (streamIn != null) {
return true;
}
} else {
if (contentIn != null) {
return true;
}
}
}
return false;
}
public boolean isInputVisible() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return true;
case STAFF_DOCUMENT:
return true;
case COURSE_DOCUMENT:
return true;
case TRIAL_DOCUMENT:
return isCreated() || !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() || !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() || !WebUtil.isMassMailLocked(massMail);
default:
return true;
}
}
return true;
}
@Override
public boolean isRemovable() {
if (module != null) {
switch (module) {
case INVENTORY_DOCUMENT:
return super.isRemovable();
case STAFF_DOCUMENT:
return super.isRemovable();
case COURSE_DOCUMENT:
return super.isRemovable();
case TRIAL_DOCUMENT:
return isCreated() && !WebUtil.isTrialLocked(trial);
case PROBAND_DOCUMENT:
return isCreated() && !WebUtil.isProbandLocked(proband);
case MASS_MAIL_DOCUMENT:
return isCreated() && !WebUtil.isMassMailLocked(massMail);
default:
break;
}
}
return super.isRemovable();
}
public boolean isUseFileEncryption() {
return useFileEncryption;
}
@Override
public String loadAction() {
return loadAction(in.getId());
}
@Override
public String loadAction(Long id) {
out = null;
try {
out = WebUtil.getServiceLocator().getFileService().getFile(WebUtil.getAuthentication(), id);
if (!out.isDecrypted()) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.ENCRYPTED_FILE, Long.toString(out.getId()));
out = null;
return ERROR_OUTCOME;
}
return LOAD_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
} finally {
initIn();
initSets();
}
return ERROR_OUTCOME;
}
public void onNodeExpand(NodeExpandEvent event) {
TreeNode treeNode = event.getTreeNode();
if (treeNode != null) {
Iterator<TreeNode> it = treeNode.getChildren().iterator();
while (it.hasNext()) {
it.next().setParent(null);
}
treeNode.getChildren().clear();
loadFileFolderTree(treeNode, out, true, this.module, this.entityId, ((FileFolderVO) treeNode.getData()).getFolderPath(), null, null, createSFVO(), 1, true);
}
}
@Override
public String resetAction() {
out = null;
initIn();
initSets();
return RESET_OUTCOME;
}
public void selectFileByNode() {
Long fileId = WebUtil.getLongParamValue(GetParamNames.FILE_ID);
if (fileId != null) {
change(fileId.toString());
} else {
String logicalPath = JsUtil.decodeBase64(WebUtil.getParamValue(GetParamNames.LOGICAL_PATH));
if (logicalPath != null) {
in.setLogicalPath(logicalPath);
} else {
// since this is an actionlistener of a command request, we allow deselection explicitly
this.out = null;
this.initIn();
initSets();
}
}
}
public void setActiveFilter(Boolean activeFilter) {
this.activeFilter = activeFilter;
}
public void setFileLogicalPathFilter(String fileLogicalPathFilter) {
this.fileLogicalPathFilter = fileLogicalPathFilter;
}
public void setFileNameFilter(String fileNameFilter) {
this.fileNameFilter = fileNameFilter;
}
public void setPublicFilter(Boolean publicFilter) {
this.publicFilter = publicFilter;
}
public void setSelectedFile(TreeNode node) { // treetable only
if (node != null) {
node.setSelected(false);
if (node.getData() instanceof FileOutVO) {
change(Long.toString(((FileOutVO) node.getData()).getId())); // we load the instance again, to refresh the tree hirarchy depth....
} else if (node.getData() instanceof FileFolderVO) {
in.setLogicalPath(((FileFolderVO) node.getData()).getFolderPath());
}
}
}
public void setTitleFilter(String titleFilter) {
this.titleFilter = titleFilter;
}
@Override
public String updateAction() {
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in);
initIn();
initSets();
addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
return UPDATE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
public void updateFile() {
actionPostProcess(updateFileAction());
}
public String updateFileAction() {
if (streamUploadEnabled == null) {
Messages.addLocalizedMessage(FacesMessage.SEVERITY_ERROR, MessageCodes.FILE_STREAM_UPLOAD_MODE_UNDEFINED);
return ERROR_OUTCOME;
}
in.setLogicalPath(CommonUtil.fixLogicalPathFolderName(in.getLogicalPath()));
try {
if (streamUploadEnabled) {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in, streamIn);
} else {
out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in, contentIn);
}
initIn();
initSets();
addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
return UPDATE_OUTCOME;
} catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
} catch (AuthenticationException e) {
Messages.addMessage(FacesMessage.SEVERITY_ERROR, e.getMessage());
WebUtil.publishException(e);
}
return ERROR_OUTCOME;
}
private void updateFileFolderTree(FileModule module, Long id) {
updateFileFolderTree(module, id, out, true);
}
private void updateFileFolderTree(FileModule module, Long id, FileOutVO selectedOut, boolean select) {
fileRoot.getChildren().clear();
((FileFolderVO) fileRoot.getData()).resetCounts();
loadFileFolderTree(fileRoot, selectedOut, select, module, id, CommonUtil.LOGICAL_PATH_SEPARATOR, null, null, createSFVO(), isDynamic() ? 1 : -1, true);
}
private void updateLogicalFileSystemStats() {
if (!isDynamic()) {
long totalSize = 0l;
long totalFileCount = 0l;
if (fileRoot != null) {
Iterator<TreeNode> rootChildrenIt = fileRoot.getChildren().iterator();
while (rootChildrenIt.hasNext()) {
Object data = rootChildrenIt.next().getData();
if (data instanceof FileFolderVO) {
FileFolderVO folderData = (FileFolderVO) data;
totalSize += folderData.getSize();
totalFileCount += folderData.getTotalFileCount();
} else if (data instanceof FileOutVO) {
FileOutVO fileData = (FileOutVO) data;
totalSize += fileData.getSize();
totalFileCount += 1;
}
}
}
fileCount = totalFileCount;
logicalFileSystemStats = Messages.getMessage(MessageCodes.LOGICAL_FILE_SYSTEM_STATS_LABEL, CommonUtil.humanReadableByteCount(totalSize), totalFileCount);
} else {
fileCount = null;
logicalFileSystemStats = null;
}
}
}
| "File" tab: prevent reloading tree when selecting a file | web/src/main/java/org/phoenixctms/ctsms/web/model/shared/FileBean.java | "File" tab: prevent reloading tree when selecting a file | <ide><path>eb/src/main/java/org/phoenixctms/ctsms/web/model/shared/FileBean.java
<ide> }
<ide> if (init) {
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> }
<ide> addOperationSuccessMessage(MessageCodes.ADD_OPERATION_SUCCESSFUL);
<ide> return ADD_OUTCOME;
<ide> }
<ide> }
<ide>
<del> @Override
<del> protected String changeAction(Long id) {
<add> private String changeFileAction(Long id, boolean updateTree) {
<ide> out = null;
<ide> if (id != null) {
<ide> try {
<ide> }
<ide> }
<ide> initIn();
<del> initSets();
<add> initSets(updateTree);
<ide> return CHANGE_OUTCOME;
<add> }
<add>
<add> @Override
<add> protected String changeAction(Long id) {
<add> return changeFileAction(id, true);
<ide> }
<ide>
<ide> private String changeAction(String param, FileModule module) {
<ide> activeFilter = null;
<ide> publicFilter = null;
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> return CHANGE_OUTCOME;
<ide> }
<ide>
<ide> try {
<ide> out = WebUtil.getServiceLocator().getFileService().deleteFile(WebUtil.getAuthentication(), id);
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> out = null;
<ide> addOperationSuccessMessage(MessageCodes.DELETE_OPERATION_SUCCESSFUL);
<ide> return DELETE_OUTCOME;
<ide> Messages.addLocalizedMessage(FacesMessage.SEVERITY_INFO, MessageCodes.BULK_DELETE_OPERATION_SUCCESSFUL, sf.getRowCount(), sf.getRowCount());
<ide> }
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> out = null;
<ide> return BULK_DELETE_OUTCOME;
<ide> } catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
<ide> this.load(id);
<ide> } else {
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> }
<ide> }
<ide>
<ide> streamIn = null;
<ide> }
<ide>
<del> private void initSets() {
<add> private void initSets(boolean updateTree) {
<ide> inventory = (FileModule.INVENTORY_DOCUMENT.equals(module) ? WebUtil.getInventory(entityId, null, null, null) : null);
<ide> staff = (FileModule.STAFF_DOCUMENT.equals(module) ? WebUtil.getStaff(entityId, null, null, null) : null);
<ide> course = (FileModule.COURSE_DOCUMENT.equals(module) ? WebUtil.getCourse(entityId, null, null, null) : null);
<ide> massMail = (FileModule.MASS_MAIL_DOCUMENT.equals(module) ? WebUtil.getMassMail(entityId) : null);
<ide> useFileEncryption = CommonUtil.getUseFileEncryption(module);
<ide> lastUploadedOut = null;
<del> updateFileFolderTree(module, entityId);
<del> updateLogicalFileSystemStats();
<add> if (updateTree) {
<add> updateFileFolderTree(module, entityId);
<add> updateLogicalFileSystemStats();
<add> }
<ide> allowTypes = WebUtil.getAllowedFileExtensionsPattern(module, null);
<ide> if (module != null) {
<ide> switch (module) {
<ide> WebUtil.publishException(e);
<ide> } finally {
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> }
<ide> return ERROR_OUTCOME;
<ide> }
<ide> public String resetAction() {
<ide> out = null;
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> return RESET_OUTCOME;
<ide> }
<ide>
<ide> public void selectFileByNode() {
<ide> Long fileId = WebUtil.getLongParamValue(GetParamNames.FILE_ID);
<ide> if (fileId != null) {
<del> change(fileId.toString());
<add> actionPostProcess(changeFileAction(fileId, false));
<ide> } else {
<ide> String logicalPath = JsUtil.decodeBase64(WebUtil.getParamValue(GetParamNames.LOGICAL_PATH));
<ide> if (logicalPath != null) {
<ide> // since this is an actionlistener of a command request, we allow deselection explicitly
<ide> this.out = null;
<ide> this.initIn();
<del> initSets();
<add> initSets(false);
<ide> }
<ide> }
<ide> }
<ide> if (node != null) {
<ide> node.setSelected(false);
<ide> if (node.getData() instanceof FileOutVO) {
<del> change(Long.toString(((FileOutVO) node.getData()).getId())); // we load the instance again, to refresh the tree hirarchy depth....
<add> actionPostProcess(changeFileAction(((FileOutVO) node.getData()).getId(), false));
<add> //change(Long.toString(((FileOutVO) node.getData()).getId())); // we load the instance again, to refresh the tree hirarchy depth....
<ide> } else if (node.getData() instanceof FileFolderVO) {
<ide> in.setLogicalPath(((FileFolderVO) node.getData()).getFolderPath());
<ide> }
<ide> try {
<ide> out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in);
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
<ide> return UPDATE_OUTCOME;
<ide> } catch (ServiceException | AuthorisationException | IllegalArgumentException e) {
<ide> out = WebUtil.getServiceLocator().getFileService().updateFile(WebUtil.getAuthentication(), in, contentIn);
<ide> }
<ide> initIn();
<del> initSets();
<add> initSets(true);
<ide> addOperationSuccessMessage(MessageCodes.UPDATE_OPERATION_SUCCESSFUL);
<ide> return UPDATE_OUTCOME;
<ide> } catch (ServiceException | AuthorisationException | IllegalArgumentException e) { |
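The diff above threads an updateTree flag through initSets(...): create, update, delete and reset paths still rebuild the folder tree with initSets(true), while callbacks that originate from the already-rendered tree (selecting a file node, explicit deselection) now pass false and leave it alone. A minimal sketch of the same idea, using hypothetical names in place of the FileBean internals:

    // Illustrative only -- FolderTreeBean, rebuildTree() and loadSelection() are made-up names,
    // not part of the FileBean API shown above.
    class FolderTreeBean {
        void onDataChanged() {              // create/update/delete changed the tree contents
            initSets(true);
        }
        void onNodeSelected(Long fileId) {  // selection inside the tree that is already on screen
            loadSelection(fileId);
            initSets(false);                // refresh dependent state, skip the expensive rebuild
        }
        private void initSets(boolean updateTree) {
            if (updateTree) {
                rebuildTree();
            }
            // ...refresh entity references, filters and statistics...
        }
        private void rebuildTree() { /* walk the logical file system and rebuild the tree nodes */ }
        private void loadSelection(Long fileId) { /* load the selected file without touching the tree */ }
    }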
|
Java | apache-2.0 | e1663e9e156a820eb5ccbbef5a14bb297a12b5cb | 0 | ChrisLMerrill/muse,ChrisLMerrill/muse | package org.musetest.core.test.plugins;
import org.musetest.core.*;
import org.musetest.core.datacollection.*;
import org.musetest.core.events.*;
import org.musetest.core.execution.*;
import org.musetest.core.plugins.*;
import org.musetest.core.resource.generic.*;
import org.musetest.core.values.*;
import javax.annotation.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class TestResultCollector extends GenericConfigurableTestPlugin implements DataCollector
{
public TestResultCollector(GenericResourceConfiguration configuration)
{
super(configuration);
}
@Override
public List<TestResultData> getData()
{
return Collections.singletonList(_result);
}
public TestResult getResult()
{
return _result;
}
@Override
public void initialize(MuseExecutionContext context) throws MuseExecutionError
{
Boolean fail_on_error = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_ERROR, false, context.getProject()), context, true, Boolean.class);
if (fail_on_error != null)
_fail_on_error = fail_on_error;
Boolean fail_on_failure = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_FAILURE, false, context.getProject()), context, true, Boolean.class);
if (fail_on_failure != null)
_fail_on_failure = fail_on_failure;
Boolean fail_on_interrupt = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_INTERRUPT, false, context.getProject()), context, true, Boolean.class);
if (fail_on_interrupt != null)
_fail_on_interrupt = fail_on_interrupt;
context.addEventListener(new MuseEventListener()
{
@Override
public void eventRaised(MuseEvent event)
{
if (event.getTypeId().equals(StartTestEventType.TYPE_ID))
{
_result = new TestResult();
_result.setTestId(event.getAttributeAsString(StartTestEventType.TEST_ID));
_result.setName(event.getAttributeAsString(StartTestEventType.TEST_NAME));
}
else if (event.getTypeId().equals(EndTestEventType.TYPE_ID))
{
context.removeEventListener(this);
String summary = "Test completed successfully";
if (_result.getFailures().size() > 0)
summary = String.format("Test failed with %d failure(s) and %d error(s).", countFailures(_result, TestResult.FailureType.Failure), countFailures(_result, TestResult.FailureType.Error));
_result.setSummary(summary);
}
else if (event.getTypeId().equals(InterruptedEventType.TYPE_ID) && _fail_on_interrupt)
{
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Interrupted, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
_result.setPass(false);
}
else
{
if (event.hasTag(MuseEvent.FAILURE) && _fail_on_failure)
{
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Failure, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
_result.setPass(false);
}
else if (event.hasTag(MuseEvent.ERROR) && _fail_on_error)
{
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Error, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
_result.setPass(false);
}
}
}
});
}
private int countFailures(TestResult result, TestResult.FailureType type)
{
int count = 0;
for (TestResult.Failure failure : result.getFailures())
if (failure.getType().equals(type))
count++;
return count;
}
private boolean _fail_on_failure = true;
private boolean _fail_on_error = true;
private boolean _fail_on_interrupt = true;
private TestResult _result = new TestResult();
}
| core/src/main/java/org/musetest/core/test/plugins/TestResultCollector.java | package org.musetest.core.test.plugins;
import org.musetest.core.*;
import org.musetest.core.datacollection.*;
import org.musetest.core.events.*;
import org.musetest.core.execution.*;
import org.musetest.core.plugins.*;
import org.musetest.core.resource.generic.*;
import org.musetest.core.values.*;
import javax.annotation.*;
import java.util.*;
/**
* @author Christopher L Merrill (see LICENSE.txt for license details)
*/
public class TestResultCollector extends GenericConfigurableTestPlugin implements DataCollector
{
public TestResultCollector(GenericResourceConfiguration configuration)
{
super(configuration);
}
@Override
public List<TestResultData> getData()
{
return Collections.singletonList(_result);
}
public TestResult getResult()
{
return _result;
}
@Override
public void initialize(MuseExecutionContext context) throws MuseExecutionError
{
Boolean fail_on_error = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_ERROR, false, context.getProject()), context, true, Boolean.class);
if (fail_on_error != null)
_fail_on_error = fail_on_error;
Boolean fail_on_failure = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_FAILURE, false, context.getProject()), context, true, Boolean.class);
if (fail_on_failure != null)
_fail_on_failure = fail_on_failure;
Boolean fail_on_interrupt = BaseValueSource.getValue(BaseValueSource.getValueSource(_configuration.parameters(), TestResultCollectorConfiguration.FAIL_ON_INTERRUPT, false, context.getProject()), context, true, Boolean.class);
if (fail_on_interrupt != null)
_fail_on_interrupt = fail_on_interrupt;
context.addEventListener(new MuseEventListener()
{
@Override
public void eventRaised(MuseEvent event)
{
if (event.getTypeId().equals(StartTestEventType.TYPE_ID))
{
_result.setTestId(event.getAttributeAsString(StartTestEventType.TEST_ID));
_result.setName(event.getAttributeAsString(StartTestEventType.TEST_NAME));
}
else if (event.getTypeId().equals(EndTestEventType.TYPE_ID))
{
context.removeEventListener(this);
String summary = "Test completed successfully";
if (_result.getFailures().size() > 0)
summary = String.format("Test failed with %d failure(s) and %d error(s).", countFailures(_result, TestResult.FailureType.Failure), countFailures(_result, TestResult.FailureType.Error));
_result.setSummary(summary);
}
else if (event.getTypeId().equals(InterruptedEventType.TYPE_ID) && _fail_on_interrupt)
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Interrupted, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
else
{
if (event.hasTag(MuseEvent.FAILURE) && _fail_on_failure)
{
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Failure, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
_result.setPass(false);
}
else if (event.hasTag(MuseEvent.ERROR) && _fail_on_error)
{
_result.addFailure(new TestResult.Failure(TestResult.FailureType.Error, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
_result.setPass(false);
}
}
}
});
}
private int countFailures(TestResult result, TestResult.FailureType type)
{
int count = 0;
for (TestResult.Failure failure : result.getFailures())
if (failure.getType().equals(type))
count++;
return count;
}
private boolean _fail_on_failure = true;
private boolean _fail_on_error = true;
private boolean _fail_on_interrupt = true;
private TestResult _result = new TestResult();
}
| TestResultCollector now correctly fails a test on interrupt (if configured)
| core/src/main/java/org/musetest/core/test/plugins/TestResultCollector.java | TestResultCollector now correctly fails a test on interrupt (if configured) | <ide><path>ore/src/main/java/org/musetest/core/test/plugins/TestResultCollector.java
<ide> {
<ide> if (event.getTypeId().equals(StartTestEventType.TYPE_ID))
<ide> {
<add> _result = new TestResult();
<ide> _result.setTestId(event.getAttributeAsString(StartTestEventType.TEST_ID));
<ide> _result.setName(event.getAttributeAsString(StartTestEventType.TEST_NAME));
<ide> }
<ide> _result.setSummary(summary);
<ide> }
<ide> else if (event.getTypeId().equals(InterruptedEventType.TYPE_ID) && _fail_on_interrupt)
<del> _result.addFailure(new TestResult.Failure(TestResult.FailureType.Interrupted, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
<del> else
<add> {
<add> _result.addFailure(new TestResult.Failure(TestResult.FailureType.Interrupted, event.getAttributeAsString(MuseEvent.DESCRIPTION)));
<add> _result.setPass(false);
<add> }
<add> else
<ide> {
<ide> if (event.hasTag(MuseEvent.FAILURE) && _fail_on_failure)
<ide> { |
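The fix above brings the interrupted branch in line with the failure and error branches: recording a TestResult.Failure for an interrupt now also clears the pass flag. The invariant can be stated as a small check along these lines (hypothetical snippet; it assumes TestResult exposes an isPass() accessor matching the setPass(boolean) setter used above):

    TestResult result = new TestResult();
    result.addFailure(new TestResult.Failure(TestResult.FailureType.Interrupted, "stopped by user"));
    result.setPass(false);
    if (result.isPass() || result.getFailures().size() != 1)
        throw new AssertionError("an interrupted test must be recorded as not passed");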
|
Java | mit | error: pathspec 'demo/net/poczone/blobstorage/client/BlobStorageClientDemo.java' did not match any file(s) known to git
| 36f295c6490455660e044f366270d5673c27a158 | 1 | poczone/blobstorage | package net.poczone.blobstorage.client;
import java.io.IOException;
import net.poczone.blobstorage.shared.Blob;
import net.poczone.blobstorage.shared.Login;
public class BlobStorageClientDemo {
public static void main(String[] args) throws IOException {
String base = BlobStorageClient.BASE_POCZONE;
Login login = new Login("user", "pass");
BlobStorageClient client = new BlobStorageClient(base, login);
String path = "a/b/demo.txt";
String mimeType = "text/plain";
byte[] data = "I was here.".getBytes();
System.out.println("POST blob to " + path + "...");
Blob blob = new Blob(path, mimeType, data);
client.post(blob);
System.out.println("=> Done.");
System.out.println("GET blob from " + path + "...");
Blob blob2 = client.get(path);
if (blob2.exists()) {
byte[] data2 = blob2.getData();
System.out.println("=> Text: " + new String(data2));
}
System.out.println("DELETE blob from " + path);
boolean deleted = client.delete(path);
System.out.println("=> Deleted: " + deleted);
}
}
| demo/net/poczone/blobstorage/client/BlobStorageClientDemo.java | Demo Code | demo/net/poczone/blobstorage/client/BlobStorageClientDemo.java | Demo Code | <ide><path>emo/net/poczone/blobstorage/client/BlobStorageClientDemo.java
<add>package net.poczone.blobstorage.client;
<add>
<add>import java.io.IOException;
<add>
<add>import net.poczone.blobstorage.shared.Blob;
<add>import net.poczone.blobstorage.shared.Login;
<add>
<add>public class BlobStorageClientDemo {
<add> public static void main(String[] args) throws IOException {
<add> String base = BlobStorageClient.BASE_POCZONE;
<add> Login login = new Login("user", "pass");
<add>
<add> BlobStorageClient client = new BlobStorageClient(base, login);
<add>
<add> String path = "a/b/demo.txt";
<add> String mimeType = "text/plain";
<add> byte[] data = "I was here.".getBytes();
<add>
<add> System.out.println("POST blob to " + path + "...");
<add> Blob blob = new Blob(path, mimeType, data);
<add> client.post(blob);
<add> System.out.println("=> Done.");
<add>
<add> System.out.println("GET blob from " + path + "...");
<add> Blob blob2 = client.get(path);
<add> if (blob2.exists()) {
<add> byte[] data2 = blob2.getData();
<add>
<add> System.out.println("=> Text: " + new String(data2));
<add> }
<add>
<add> System.out.println("DELETE blob from " + path);
<add> boolean deleted = client.delete(path);
<add> System.out.println("=> Deleted: " + deleted);
<add> }
<add>} |
|
Java | mit | 066ba51f2e2db023bdcbbff3a384fee3f0f51eae | 0 | kmdouglass/Micro-Manager,kmdouglass/Micro-Manager | ///////////////////////////////////////////////////////////////////////////////
//FILE: PatternOverlayFrame.java
//PROJECT: Micro-Manager
//SUBSYSTEM: Image Overlay plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Jon Daniels
//
// COPYRIGHT: Applied Scientific Instrumentation, 2014
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.patternoverlay;
import com.google.common.eventbus.Subscribe;
import ij.ImagePlus;
import ij.gui.ImageWindow;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.JToggleButton;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.micromanager.MMStudio;
import org.micromanager.api.ScriptInterface;
import org.micromanager.utils.MMFrame;
import net.miginfocom.swing.MigLayout;
import org.micromanager.events.DisplayCreatedEvent;
/**
* The plugin window.
* Enables the user to set preferences like overlay type and size.
*
* To make a new pattern:
* 1. Derive from class GenericOverlay and implement getRoi() which
* creates and returns an ImageJ Roi object for the overlay.
* See existing overlays as examples
* (e.g. CrosshairOverlay.java, GridOverlay.java).
 * 2. Add an entry to the OverlayOption.Keys enum
* 3. Create/add the object along with associated key to the overlayModel
* variable in the constructor of PatternOverlayFrame.
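 *
 * A rough sketch of step 1 (illustrative only; it assumes GenericOverlay keeps the
 * (preferences node, key prefix) constructor used for CrosshairOverlay below and exposes a
 * getRoi(width, height) hook -- check the real signatures in CrosshairOverlay/GridOverlay
 * before copying):
 * <pre>{@code
 * public class DiagonalOverlay extends GenericOverlay {
 *     public DiagonalOverlay(Preferences prefs, String prefix) {
 *         super(prefs, prefix);
 *     }
 *     protected Roi getRoi(int width, int height) {
 *         return new Line(0, 0, width, height);  // ij.gui.Line is an ImageJ Roi
 *     }
 * }
 * }</pre>
 * Step 2 would then add a hypothetical DIAGONAL entry to OverlayOption.Keys, and step 3 would
 * register new DiagonalOverlay(getPrefsNode(), OverlayOption.Keys.DIAGONAL.toString() + "_")
 * in the overlayModel inside the constructor.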
*
* @author Matthijs
* @author Jon
*/
@SuppressWarnings("LeakingThisInConstructor")
public class PatternOverlayFrame extends MMFrame {
private final ScriptInterface gui_;
private final JComboBox overlayBox_;
private final JToggleButton toggleButton_;
private final JSlider sizeSlider_;
private final JComboBox colorBox_;
private final MMFrame ourFrame_ = this;
private GenericOverlay lastOverlay_;
public PatternOverlayFrame(ScriptInterface gui) {
this.setLayout(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
gui_ = gui;
loadAndRestorePosition(100, 100, WIDTH, WIDTH);
lastOverlay_ = null;
add(new JLabel("Type:"));
overlayBox_ = new JComboBox();
add(overlayBox_, "wrap");
DefaultComboBoxModel overlayModel = new DefaultComboBoxModel();
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.CROSSHAIR,
new CrosshairOverlay(getPrefsNode(), OverlayOption.Keys.CROSSHAIR.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.GRID,
new GridOverlay(getPrefsNode(), OverlayOption.Keys.GRID.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.CIRCLE,
new CircleOverlay(getPrefsNode(), OverlayOption.Keys.CIRCLE.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.TARGET,
new TargetOverlay(getPrefsNode(), OverlayOption.Keys.TARGET.toString() + "_")));
overlayBox_.setModel(overlayModel);
overlayBox_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
getPrefsNode().putInt(Constants.TYPE_BOX_IDX, overlayBox_.getSelectedIndex());
updateToggleButtonLabel();
GenericOverlay currentOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
try {
// turn off the last-used overlay
if (lastOverlay_ != null) {
lastOverlay_.setVisible(false);
}
currentOverlay.setVisible(toggleButton_.isSelected());
sizeSlider_.setValue(currentOverlay.getSize());
sizeSlider_.repaint();
colorBox_.setSelectedIndex(currentOverlay.getColorCode());
} catch (Exception e1) {
gui_.showError(e1, ourFrame_);
}
lastOverlay_ = currentOverlay;
}
});
toggleButton_ = new JToggleButton();
add(toggleButton_, "span 2, wrap, growx");
toggleButton_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
toggleOverlay(null);
}
});
add(new JLabel("Size:"));
sizeSlider_ = new JSlider();
add(sizeSlider_, "wrap, width ::80");
sizeSlider_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
((OverlayOption) overlayBox_.getSelectedItem()).getOverlay().setSize(sizeSlider_.getValue());
// pref save handled by GenericOverlay
}
});
add(new JLabel("Color:"));
colorBox_ = new JComboBox();
add(colorBox_, "wrap");
DefaultComboBoxModel colorModel = new DefaultComboBoxModel(Constants.COLOR_OPTION_ARRAY);
colorBox_.setModel(colorModel);
colorBox_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
((OverlayOption) overlayBox_.getSelectedItem()).getOverlay().setColorCode(colorBox_.getSelectedIndex());
// pref save handled by GenericOverlay
}
});
// setting this from prefs needs to come after toggle button is created
// and also color and size boxes because all are referenced by ActionListener
overlayBox_.setSelectedIndex(getPrefsNode().getInt(Constants.TYPE_BOX_IDX, 0));
updateToggleButtonLabel();
pack(); // shrinks the window as much as it can
setResizable(false);
addWindowListener(new java.awt.event.WindowAdapter() {
@Override
public void windowClosing(java.awt.event.WindowEvent evt) {
// turn overlay off before exiting
if (toggleButton_.isSelected()) {
toggleButton_.doClick();
}
}
});
gui_.registerForEvents(this);
}//constructor
private void updateToggleButtonLabel() {
String selectedOverlayStr = ((OverlayOption) overlayBox_.getSelectedItem()).toString();
if (toggleButton_.isSelected()) {
toggleButton_.setText("Hide " + selectedOverlayStr);
} else {
toggleButton_.setText("Show " + selectedOverlayStr);
}
}
/**
* Get reference to the live image window, through the MicroManager
* studioMainFrame instance.
*
* @return Reference to the ImagePlus object associated with the live
* window. Will return null if no live image is currently active.
*/
public static ImagePlus getLiveWindowImage () {
ImageWindow window = MMStudio.getInstance().getSnapLiveWin();
if (window == null) {
return null;
} else {
return window.getImagePlus();
}
}
/**
* Toggles overlay depending on the state of the Show/Hide button
* Also called when a new live/snap window opens
*/
@Subscribe
public void toggleOverlay(DisplayCreatedEvent dce) {
try {
boolean visible = toggleButton_.isSelected();
GenericOverlay selectedOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
selectedOverlay.setVisible(visible);
} catch (Exception ex) {
gui_.logError("Could not enable overlay ("
+ ((OverlayOption) overlayBox_.getSelectedItem()).toString() + "). "
+ "Error Message: " + ex.getMessage());
gui_.showMessage(
"The overlay could not be shown. Is the live image window active?",
ourFrame_);
toggleButton_.setSelected(false);
}
}
}
| plugins/PatternOverlay/src/org/micromanager/patternoverlay/PatternOverlayFrame.java | ///////////////////////////////////////////////////////////////////////////////
//FILE: PatternOverlayFrame.java
//PROJECT: Micro-Manager
//SUBSYSTEM: Image Overlay plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Jon Daniels
//
// COPYRIGHT: Applied Scientific Instrumentation, 2014
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.patternoverlay;
import ij.ImagePlus;
import ij.gui.ImageWindow;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.JToggleButton;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.micromanager.MMStudio;
import org.micromanager.api.ScriptInterface;
import org.micromanager.utils.MMFrame;
import net.miginfocom.swing.MigLayout;
/**
* The plugin window.
* Enables the user to set preferences like overlay type and size.
*
* To make a new pattern:
* 1. Derive from class GenericOverlay and implement getRoi() which
* creates and returns an ImageJ Roi object for the overlay.
* See existing overlays as examples
* (e.g. CrosshairOverlay.java, GridOverlay.java).
 * 2. Add an entry to the OverlayOption.Keys enum
* 3. Create/add the object along with associated key to the overlayModel
* variable in the constructor of PatternOverlayFrame.
*
* @author Matthijs
* @author Jon
*/
@SuppressWarnings("serial")
public class PatternOverlayFrame extends MMFrame {
private final ScriptInterface gui_;
private final JComboBox overlayBox_;
private final JToggleButton toggleButton_;
private final JSlider sizeSlider_;
private final JComboBox colorBox_;
private GenericOverlay lastOverlay_;
public PatternOverlayFrame(ScriptInterface gui) {
this.setLayout(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
gui_ = gui;
final MMFrame ourFrame = this;
loadAndRestorePosition(100, 100, WIDTH, WIDTH);
lastOverlay_ = null;
add(new JLabel("Type:"));
overlayBox_ = new JComboBox();
add(overlayBox_, "wrap");
DefaultComboBoxModel overlayModel = new DefaultComboBoxModel();
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.CROSSHAIR,
new CrosshairOverlay(getPrefsNode(), OverlayOption.Keys.CROSSHAIR.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.GRID,
new GridOverlay(getPrefsNode(), OverlayOption.Keys.GRID.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.CIRCLE,
new CircleOverlay(getPrefsNode(), OverlayOption.Keys.CIRCLE.toString() + "_")));
overlayModel.addElement(new OverlayOption(OverlayOption.Keys.TARGET,
new TargetOverlay(getPrefsNode(), OverlayOption.Keys.TARGET.toString() + "_")));
overlayBox_.setModel(overlayModel);
overlayBox_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
getPrefsNode().putInt(Constants.TYPE_BOX_IDX, overlayBox_.getSelectedIndex());
updateToggleButtonLabel();
GenericOverlay currentOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
try {
// turn off the last-used overlay
if (lastOverlay_ != null) {
lastOverlay_.setVisible(false);
}
currentOverlay.setVisible(toggleButton_.isSelected());
sizeSlider_.setValue(currentOverlay.getSize());
sizeSlider_.repaint();
colorBox_.setSelectedIndex(currentOverlay.getColorCode());
} catch (Exception e1) {
gui_.showError(e1, ourFrame);
}
lastOverlay_ = currentOverlay;
}
});
toggleButton_ = new JToggleButton();
add(toggleButton_, "span 2, wrap, growx");
toggleButton_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
try {
boolean visible = toggleButton_.isSelected();
GenericOverlay selectedOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
selectedOverlay.setVisible(visible);
updateToggleButtonLabel();
} catch (Exception ex) {
gui_.logError("Could not enable overlay (" +
((OverlayOption) overlayBox_.getSelectedItem()).toString() + "). "
+ "Error Message: " + ex.getMessage() );
gui_.showMessage(
"The overlay could not be shown. Is the live image window active?",
ourFrame);
toggleButton_.setSelected(false);
}
}
});
add(new JLabel("Size:"));
sizeSlider_ = new JSlider();
add(sizeSlider_, "wrap, width ::80");
sizeSlider_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
((OverlayOption) overlayBox_.getSelectedItem()).getOverlay().setSize(sizeSlider_.getValue());
// pref save handled by GenericOverlay
}
});
add(new JLabel("Color:"));
colorBox_ = new JComboBox();
add(colorBox_, "wrap");
DefaultComboBoxModel colorModel = new DefaultComboBoxModel(Constants.COLOR_OPTION_ARRAY);
colorBox_.setModel(colorModel);
colorBox_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
((OverlayOption) overlayBox_.getSelectedItem()).getOverlay().setColorCode(colorBox_.getSelectedIndex());
// pref save handled by GenericOverlay
}
});
// setting this from prefs needs to come after toggle button is created
// and also color and size boxes because all are referenced by ActionListener
overlayBox_.setSelectedIndex(getPrefsNode().getInt(Constants.TYPE_BOX_IDX, 0));
updateToggleButtonLabel();
pack(); // shrinks the window as much as it can
setResizable(false);
addWindowListener(new java.awt.event.WindowAdapter() {
@Override
public void windowClosing(java.awt.event.WindowEvent evt) {
// turn overlay off before exiting
if (toggleButton_.isSelected()) {
toggleButton_.doClick();
}
}
});
}//constructor
private void updateToggleButtonLabel() {
String selectedOverlayStr = ((OverlayOption) overlayBox_.getSelectedItem()).toString();
if (toggleButton_.isSelected()) {
toggleButton_.setText("Hide " + selectedOverlayStr);
} else {
toggleButton_.setText("Show " + selectedOverlayStr);
}
}
/**
* Get reference to the live image window, through the MicroManager
* studioMainFrame instance.
*
* @return Reference to the ImagePlus object associated with the live
* window. Will return null if no live image is currently active.
*/
public static ImagePlus getLiveWindowImage () {
ImageWindow window = MMStudio.getInstance().getSnapLiveWin();
if (window == null) {
return null;
} else {
return window.getImagePlus();
}
}
}
| PatternOverlay plugin: now show the overlay also when the nap/live window is closed and re-opened
git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@14723 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
| plugins/PatternOverlay/src/org/micromanager/patternoverlay/PatternOverlayFrame.java | PatternOverlay plugin: now show the overlay also when the nap/live window is closed and re-opened | <ide><path>lugins/PatternOverlay/src/org/micromanager/patternoverlay/PatternOverlayFrame.java
<ide> package org.micromanager.patternoverlay;
<ide>
<ide>
<add>import com.google.common.eventbus.Subscribe;
<ide> import ij.ImagePlus;
<ide> import ij.gui.ImageWindow;
<ide>
<ide> import org.micromanager.utils.MMFrame;
<ide>
<ide> import net.miginfocom.swing.MigLayout;
<add>import org.micromanager.events.DisplayCreatedEvent;
<ide>
<ide>
<ide> /**
<ide> * @author Matthijs
<ide> * @author Jon
<ide> */
<del>@SuppressWarnings("serial")
<add>@SuppressWarnings("LeakingThisInConstructor")
<ide> public class PatternOverlayFrame extends MMFrame {
<ide> private final ScriptInterface gui_;
<ide> private final JComboBox overlayBox_;
<ide> private final JToggleButton toggleButton_;
<ide> private final JSlider sizeSlider_;
<ide> private final JComboBox colorBox_;
<add> private final MMFrame ourFrame_ = this;
<ide>
<ide> private GenericOverlay lastOverlay_;
<ide>
<ide> "[right]10[center]",
<ide> "[]8[]"));
<ide> gui_ = gui;
<del> final MMFrame ourFrame = this;
<ide> loadAndRestorePosition(100, 100, WIDTH, WIDTH);
<ide>
<ide> lastOverlay_ = null;
<ide> sizeSlider_.repaint();
<ide> colorBox_.setSelectedIndex(currentOverlay.getColorCode());
<ide> } catch (Exception e1) {
<del> gui_.showError(e1, ourFrame);
<add> gui_.showError(e1, ourFrame_);
<ide> }
<ide> lastOverlay_ = currentOverlay;
<ide> }
<ide> toggleButton_.addActionListener(new ActionListener() {
<ide> @Override
<ide> public void actionPerformed(ActionEvent e) {
<del> try {
<del> boolean visible = toggleButton_.isSelected();
<del> GenericOverlay selectedOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
<del> selectedOverlay.setVisible(visible);
<del> updateToggleButtonLabel();
<del> } catch (Exception ex) {
<del> gui_.logError("Could not enable overlay (" +
<del> ((OverlayOption) overlayBox_.getSelectedItem()).toString() + "). "
<del> + "Error Message: " + ex.getMessage() );
<del> gui_.showMessage(
<del> "The overlay could not be shown. Is the live image window active?",
<del> ourFrame);
<del> toggleButton_.setSelected(false);
<del> }
<add> toggleOverlay(null);
<ide> }
<ide> });
<ide>
<ide> }
<ide> }
<ide> });
<add> gui_.registerForEvents(this);
<ide>
<ide> }//constructor
<ide>
<ide> }
<ide> }
<ide>
<add> /**
<add> * Toggles overlay depending on the state of the Show/Hide button
<add> * Also called when a new live/snap window opens
<add> */
<add> @Subscribe
<add> public void toggleOverlay(DisplayCreatedEvent dce) {
<add> try {
<add> boolean visible = toggleButton_.isSelected();
<add> GenericOverlay selectedOverlay = ((OverlayOption) overlayBox_.getSelectedItem()).getOverlay();
<add> selectedOverlay.setVisible(visible);
<add> } catch (Exception ex) {
<add> gui_.logError("Could not enable overlay ("
<add> + ((OverlayOption) overlayBox_.getSelectedItem()).toString() + "). "
<add> + "Error Message: " + ex.getMessage());
<add> gui_.showMessage(
<add> "The overlay could not be shown. Is the live image window active?",
<add> ourFrame_);
<add> toggleButton_.setSelected(false);
<add> }
<add> }
<add>
<ide> } |
|
Java | unlicense | f06dcfcf332a6816e070762411903b61e143c4c1 | 0 | Samourai-Wallet/samourai-wallet-android | app/src/main/java/com/samourai/wallet/service/SamouraiService.java | package com.samourai.wallet.service;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Binder;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Log;
import com.samourai.wallet.util.ConnectivityStatus;
import com.samourai.wallet.util.ExchangeRateFactory;
import com.samourai.wallet.util.WebUtil;
import java.util.Timer;
import java.util.TimerTask;
public class SamouraiService extends Service {
public static final String ACTION_UPDATE_BALANCE = "com.samourai.wallet.service.UPDATE_BALANCE";
public static final String ACTION_UPDATE_UTXOS = "com.samourai.wallet.service.UPDATE_UTXOS";
public static final String ACTION_UPDATE_EXCHANGE_RATES = "com.samourai.wallet.service.UPDATE_EXCHANGERATES";
public static final int MESSAGE_FROM_SERVICE = 0;
private IBinder binder = new SamouraiBinder();
private Context context = null;
private Timer timer = null;
private Handler handler = null;
private final BroadcastReceiver serviceReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context arg0, Intent arg1) {
;
}
};
@Override
public void onCreate() {
Log.i("SamouraiService", "creating service");
this.context = this;
setFilter();
start();
}
@Override
public IBinder onBind(Intent intent) {
return binder;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
return Service.START_NOT_STICKY;
}
@Override
public void onDestroy() {
Log.i("SamouraiService", "exiting service");
super.onDestroy();
}
public void setHandler(Handler handler) {
this.handler = handler;
}
private void setFilter() {
IntentFilter filter = new IntentFilter();
filter.addAction(ACTION_UPDATE_BALANCE);
filter.addAction(ACTION_UPDATE_UTXOS);
filter.addAction(ACTION_UPDATE_EXCHANGE_RATES);
registerReceiver(serviceReceiver, filter);
}
public class SamouraiBinder extends Binder {
public SamouraiService getService() {
return SamouraiService.this;
}
}
private void start() {
timer = new Timer();
handler = new Handler();
timer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
handler.post(new Runnable() {
@Override
public void run() {
Log.d("SamouraiService", "do exchange rates");
doExchangeRates();
}
});
}
}, 2000, 60000 * 15);
// doAPIUpdate();
}
public void stop() {
;
}
private void doExchangeRates() {
if(ConnectivityStatus.hasConnectivity(SamouraiService.this)) {
new Thread(new Runnable() {
@Override
public void run() {
Looper.prepare();
String response = null;
try {
response = WebUtil.getInstance(null).getURL(WebUtil.LBC_EXCHANGE_URL);
ExchangeRateFactory.getInstance(SamouraiService.this).setDataLBC(response);
ExchangeRateFactory.getInstance(SamouraiService.this).parseLBC();
}
catch(Exception e) {
e.printStackTrace();
}
response = null;
try {
response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_usd");
ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
}
catch(Exception e) {
e.printStackTrace();
}
response = null;
try {
response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_rur");
ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
}
catch(Exception e) {
e.printStackTrace();
}
response = null;
try {
response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_eur");
ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
}
catch(Exception e) {
e.printStackTrace();
}
response = null;
try {
response = WebUtil.getInstance(null).getURL(WebUtil.BFX_EXCHANGE_URL);
ExchangeRateFactory.getInstance(SamouraiService.this).setDataBFX(response);
ExchangeRateFactory.getInstance(SamouraiService.this).parseBFX();
}
catch(Exception e) {
e.printStackTrace();
}
Looper.loop();
}
}).start();
}
}
private void doAPIUpdate() {
Intent intent = new Intent("com.samourai.wallet.BalanceFragment.REFRESH");
intent.putExtra("notifTx", false);
intent.putExtra("fetch", true);
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
//
// calls from Activities
//
public void write(byte[] data) {
;
}
}
| remove dead code
| app/src/main/java/com/samourai/wallet/service/SamouraiService.java | remove dead code | <ide><path>pp/src/main/java/com/samourai/wallet/service/SamouraiService.java
<del>package com.samourai.wallet.service;
<del>
<del>import android.app.Service;
<del>import android.content.BroadcastReceiver;
<del>import android.content.Context;
<del>import android.content.Intent;
<del>import android.content.IntentFilter;
<del>import android.os.Binder;
<del>import android.os.Handler;
<del>import android.os.IBinder;
<del>import android.os.Looper;
<del>import android.support.v4.content.LocalBroadcastManager;
<del>import android.util.Log;
<del>
<del>import com.samourai.wallet.util.ConnectivityStatus;
<del>import com.samourai.wallet.util.ExchangeRateFactory;
<del>import com.samourai.wallet.util.WebUtil;
<del>
<del>import java.util.Timer;
<del>import java.util.TimerTask;
<del>
<del>public class SamouraiService extends Service {
<del>
<del> public static final String ACTION_UPDATE_BALANCE = "com.samourai.wallet.service.UPDATE_BALANCE";
<del> public static final String ACTION_UPDATE_UTXOS = "com.samourai.wallet.service.UPDATE_UTXOS";
<del> public static final String ACTION_UPDATE_EXCHANGE_RATES = "com.samourai.wallet.service.UPDATE_EXCHANGERATES";
<del>
<del> public static final int MESSAGE_FROM_SERVICE = 0;
<del>
<del> private IBinder binder = new SamouraiBinder();
<del>
<del> private Context context = null;
<del>
<del> private Timer timer = null;
<del> private Handler handler = null;
<del>
<del> private final BroadcastReceiver serviceReceiver = new BroadcastReceiver() {
<del> @Override
<del> public void onReceive(Context arg0, Intent arg1) {
<del> ;
<del> }
<del> };
<del>
<del> @Override
<del> public void onCreate() {
<del>
<del> Log.i("SamouraiService", "creating service");
<del>
<del> this.context = this;
<del>
<del> setFilter();
<del>
<del> start();
<del>
<del> }
<del>
<del> @Override
<del> public IBinder onBind(Intent intent) {
<del> return binder;
<del> }
<del>
<del> @Override
<del> public int onStartCommand(Intent intent, int flags, int startId) {
<del> return Service.START_NOT_STICKY;
<del> }
<del>
<del> @Override
<del> public void onDestroy() {
<del>
<del> Log.i("SamouraiService", "exiting service");
<del>
<del> super.onDestroy();
<del> }
<del>
<del> public void setHandler(Handler handler) {
<del> this.handler = handler;
<del> }
<del>
<del> private void setFilter() {
<del> IntentFilter filter = new IntentFilter();
<del> filter.addAction(ACTION_UPDATE_BALANCE);
<del> filter.addAction(ACTION_UPDATE_UTXOS);
<del> filter.addAction(ACTION_UPDATE_EXCHANGE_RATES);
<del> registerReceiver(serviceReceiver, filter);
<del> }
<del>
<del> public class SamouraiBinder extends Binder {
<del> public SamouraiService getService() {
<del> return SamouraiService.this;
<del> }
<del> }
<del>
<del> private void start() {
<del>
<del> timer = new Timer();
<del> handler = new Handler();
<del> timer.scheduleAtFixedRate(new TimerTask() {
<del> @Override
<del> public void run() {
<del> handler.post(new Runnable() {
<del> @Override
<del> public void run() {
<del> Log.d("SamouraiService", "do exchange rates");
<del> doExchangeRates();
<del> }
<del> });
<del> }
<del> }, 2000, 60000 * 15);
<del>
<del>// doAPIUpdate();
<del>
<del> }
<del>
<del> public void stop() {
<del> ;
<del> }
<del>
<del> private void doExchangeRates() {
<del>
<del> if(ConnectivityStatus.hasConnectivity(SamouraiService.this)) {
<del>
<del> new Thread(new Runnable() {
<del> @Override
<del> public void run() {
<del> Looper.prepare();
<del>
<del> String response = null;
<del> try {
<del> response = WebUtil.getInstance(null).getURL(WebUtil.LBC_EXCHANGE_URL);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).setDataLBC(response);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).parseLBC();
<del> }
<del> catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> response = null;
<del> try {
<del> response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_usd");
<del> ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
<del> }
<del> catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> response = null;
<del> try {
<del> response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_rur");
<del> ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
<del> }
<del> catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> response = null;
<del> try {
<del> response = WebUtil.getInstance(null).getURL(WebUtil.BTCe_EXCHANGE_URL + "btc_eur");
<del> ExchangeRateFactory.getInstance(SamouraiService.this).setDataBTCe(response);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).parseBTCe();
<del> }
<del> catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> response = null;
<del> try {
<del> response = WebUtil.getInstance(null).getURL(WebUtil.BFX_EXCHANGE_URL);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).setDataBFX(response);
<del> ExchangeRateFactory.getInstance(SamouraiService.this).parseBFX();
<del> }
<del> catch(Exception e) {
<del> e.printStackTrace();
<del> }
<del>
<del> Looper.loop();
<del>
<del> }
<del> }).start();
<del>
<del> }
<del>
<del> }
<del>
<del> private void doAPIUpdate() {
<del> Intent intent = new Intent("com.samourai.wallet.BalanceFragment.REFRESH");
<del> intent.putExtra("notifTx", false);
<del> intent.putExtra("fetch", true);
<del> LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
<del> }
<del>
<del> //
<del> // calls from Activities
<del> //
<del> public void write(byte[] data) {
<del> ;
<del> }
<del>
<del>} |
||
Java | apache-2.0 | 858a360e8e950ecf921b69349e84e321b17cf07b | 0 | osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid | package org.osmdroid.config;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.util.Log;
import org.osmdroid.api.IMapView;
import org.osmdroid.tileprovider.modules.SqlTileWriter;
import org.osmdroid.tileprovider.util.StorageUtils;
import java.io.File;
import java.net.Proxy;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static org.osmdroid.tileprovider.constants.OpenStreetMapTileProviderConstants.HTTP_EXPIRES_HEADER_FORMAT;
/**
* Default configuration provider for osmdroid
* <a href="https://github.com/osmdroid/osmdroid/issues/481">Issue 481</a>
* Created on 11/29/2016.
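 * <p>
 * Application code normally obtains the active provider through
 * {@code Configuration.getInstance()} (see {@link Configuration}) rather than constructing
 * this class directly. A short usage sketch, with placeholder values:
 * <pre>{@code
 * IConfigurationProvider config = Configuration.getInstance();
 * config.setUserAgentValue("com.example.mapapp");            // identify the app to tile servers
 * config.setTileFileSystemCacheMaxBytes(100L * 1024 * 1024); // cap the on-disk tile cache
 * config.setOsmdroidBasePath(new File(context.getFilesDir(), "osmdroid")); // context: the application Context
 * }</pre>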
* @author Alex O'Ree
* @see IConfigurationProvider
* @see Configuration
*/
public class DefaultConfigurationProvider implements IConfigurationProvider {
protected long gpsWaitTime =20000;
protected boolean debugMode= false;
protected boolean debugMapView = false;
protected boolean debugTileProviders = false;
protected boolean debugMapTileDownloader=false;
protected boolean isMapViewHardwareAccelerated=false;
protected String userAgentValue="osmdroid";
protected String userAgentHttpHeader = "User-Agent";
private final Map<String, String> mAdditionalHttpRequestProperties = new HashMap<>();
protected short cacheMapTileCount = 9;
protected short tileDownloadThreads = 2;
protected short tileFileSystemThreads = 8;
protected short tileDownloadMaxQueueSize = 40;
protected short tileFileSystemMaxQueueSize = 40;
protected long tileFileSystemCacheMaxBytes = 600L * 1024 * 1024;
protected long tileFileSystemCacheTrimBytes = 500L * 1024 * 1024;
protected SimpleDateFormat httpHeaderDateTimeFormat = new SimpleDateFormat(HTTP_EXPIRES_HEADER_FORMAT, Locale.US);
protected File osmdroidBasePath;
protected File osmdroidTileCache;
protected long expirationAdder = 0;
protected Long expirationOverride=null;
protected Proxy httpProxy=null;
protected int animationSpeedDefault =1000;
protected int animationSpeedShort =500;
protected boolean mapViewRecycler=true;
public DefaultConfigurationProvider(){
}
/**
* default is 20 seconds
* @return time in ms
*/
@Override
public long getGpsWaitTime() {
return gpsWaitTime;
}
@Override
public void setGpsWaitTime(long gpsWaitTime) {
this.gpsWaitTime = gpsWaitTime;
}
@Override
public boolean isDebugMode() {
return debugMode;
}
@Override
public void setDebugMode(boolean debugMode) {
this.debugMode = debugMode;
}
@Override
public boolean isDebugMapView() {
return debugMapView;
}
@Override
public void setDebugMapView(boolean debugMapView) {
this.debugMapView = debugMapView;
}
@Override
public boolean isDebugTileProviders() {
return debugTileProviders;
}
@Override
public void setDebugTileProviders(boolean debugTileProviders) {
this.debugTileProviders = debugTileProviders;
}
@Override
public boolean isDebugMapTileDownloader() {
return debugMapTileDownloader;
}
@Override
public void setDebugMapTileDownloader(boolean debugMapTileDownloader) {
this.debugMapTileDownloader = debugMapTileDownloader;
}
@Override
public boolean isMapViewHardwareAccelerated() {
return isMapViewHardwareAccelerated;
}
@Override
public void setMapViewHardwareAccelerated(boolean mapViewHardwareAccelerated) {
isMapViewHardwareAccelerated = mapViewHardwareAccelerated;
}
@Override
public String getUserAgentValue() {
return userAgentValue;
}
@Override
public void setUserAgentValue(String userAgentValue) {
this.userAgentValue = userAgentValue;
}
@Override
public Map<String, String> getAdditionalHttpRequestProperties() {
return mAdditionalHttpRequestProperties;
}
@Override
public short getCacheMapTileCount() {
return cacheMapTileCount;
}
@Override
public void setCacheMapTileCount(short cacheMapTileCount) {
this.cacheMapTileCount = cacheMapTileCount;
}
@Override
public short getTileDownloadThreads() {
return tileDownloadThreads;
}
@Override
public void setTileDownloadThreads(short tileDownloadThreads) {
this.tileDownloadThreads = tileDownloadThreads;
}
@Override
public short getTileFileSystemThreads() {
return tileFileSystemThreads;
}
@Override
public void setTileFileSystemThreads(short tileFileSystemThreads) {
this.tileFileSystemThreads = tileFileSystemThreads;
}
@Override
public short getTileDownloadMaxQueueSize() {
return tileDownloadMaxQueueSize;
}
@Override
public void setTileDownloadMaxQueueSize(short tileDownloadMaxQueueSize) {
this.tileDownloadMaxQueueSize = tileDownloadMaxQueueSize;
}
@Override
public short getTileFileSystemMaxQueueSize() {
return tileFileSystemMaxQueueSize;
}
@Override
public void setTileFileSystemMaxQueueSize(short tileFileSystemMaxQueueSize) {
this.tileFileSystemMaxQueueSize = tileFileSystemMaxQueueSize;
}
@Override
public long getTileFileSystemCacheMaxBytes() {
return tileFileSystemCacheMaxBytes;
}
@Override
public void setTileFileSystemCacheMaxBytes(long tileFileSystemCacheMaxBytes) {
this.tileFileSystemCacheMaxBytes = tileFileSystemCacheMaxBytes;
}
@Override
public long getTileFileSystemCacheTrimBytes() {
return tileFileSystemCacheTrimBytes;
}
@Override
public void setTileFileSystemCacheTrimBytes(long tileFileSystemCacheTrimBytes) {
this.tileFileSystemCacheTrimBytes = tileFileSystemCacheTrimBytes;
}
@Override
public SimpleDateFormat getHttpHeaderDateTimeFormat() {
return httpHeaderDateTimeFormat;
}
@Override
public void setHttpHeaderDateTimeFormat(SimpleDateFormat httpHeaderDateTimeFormat) {
this.httpHeaderDateTimeFormat = httpHeaderDateTimeFormat;
}
@Override
public Proxy getHttpProxy() {
return httpProxy;
}
@Override
public void setHttpProxy(Proxy httpProxy) {
this.httpProxy = httpProxy;
}
@Override
public File getOsmdroidBasePath() {
if (osmdroidBasePath==null)
osmdroidBasePath = new File(StorageUtils.getStorage().getAbsolutePath(), "osmdroid");
try {
osmdroidBasePath.mkdirs();
}catch (Exception ex){
Log.d(IMapView.LOGTAG, "Unable to create base path at " + osmdroidBasePath.getAbsolutePath(), ex);
//IO/permissions issue
//trap for android studio layout editor and some for certain devices
//see https://github.com/osmdroid/osmdroid/issues/508
}
return osmdroidBasePath;
}
@Override
public void setOsmdroidBasePath(File osmdroidBasePath) {
this.osmdroidBasePath = osmdroidBasePath;
}
@Override
public File getOsmdroidTileCache() {
if (osmdroidTileCache==null)
osmdroidTileCache = new File(getOsmdroidBasePath(), "tiles");
try {
osmdroidTileCache.mkdirs();
}catch (Exception ex){
Log.d(IMapView.LOGTAG, "Unable to create tile cache path at " + osmdroidTileCache.getAbsolutePath(), ex);
//IO/permissions issue
//trap for android studio layout editor and some for certain devices
//see https://github.com/osmdroid/osmdroid/issues/508
}
return osmdroidTileCache;
}
@Override
public void setOsmdroidTileCache(File osmdroidTileCache) {
this.osmdroidTileCache = osmdroidTileCache;
}
@Override
public String getUserAgentHttpHeader() {
return userAgentHttpHeader;
}
@Override
public void setUserAgentHttpHeader(String userAgentHttpHeader) {
this.userAgentHttpHeader = userAgentHttpHeader;
}
//</editor-fold>
@Override
public void load(Context ctx, SharedPreferences prefs) {
//cache management starts here
//check to see if the shared preferences is set for the tile cache
if (!prefs.contains("osmdroid.basePath")){
//this is the first time startup. run the discovery bit
File discoveredBestPath = getOsmdroidBasePath();
File discoveredCachPath = getOsmdroidTileCache();
if (!discoveredBestPath.exists() || !StorageUtils.isWritable(discoveredBestPath)) {
//this should always be writable...
discoveredCachPath=discoveredBestPath=new File("/data/data/" + ctx.getPackageName() + "/osmdroid/");
discoveredCachPath.mkdirs();
}
SharedPreferences.Editor edit = prefs.edit();
edit.putString("osmdroid.basePath",discoveredBestPath.getAbsolutePath());
edit.putString("osmdroid.cachePath",discoveredCachPath.getAbsolutePath());
edit.commit();
setOsmdroidBasePath(discoveredBestPath);
setOsmdroidTileCache(discoveredCachPath);
setUserAgentValue(ctx.getPackageName());
save(ctx,prefs);
} else {
//normal startup, load user preferences and populate the config object
setOsmdroidBasePath(new File(prefs.getString("osmdroid.basePath", getOsmdroidBasePath().getAbsolutePath())));
setOsmdroidTileCache(new File(prefs.getString("osmdroid.cachePath", getOsmdroidTileCache().getAbsolutePath())));
setDebugMode(prefs.getBoolean("osmdroid.DebugMode",false));
setDebugMapTileDownloader(prefs.getBoolean("osmdroid.DebugDownloading", false));
setDebugMapView(prefs.getBoolean("osmdroid.DebugMapView", false));
setDebugTileProviders(prefs.getBoolean("osmdroid.DebugTileProvider",false));
setMapViewHardwareAccelerated(prefs.getBoolean("osmdroid.HardwareAcceleration",false));
setUserAgentValue(prefs.getString("osmdroid.userAgentValue",ctx.getPackageName()));
load(prefs, mAdditionalHttpRequestProperties, "osmdroid.additionalHttpRequestProperty.");
setGpsWaitTime(prefs.getLong("osmdroid.gpsWaitTime", gpsWaitTime));
setTileDownloadThreads((short)(prefs.getInt("osmdroid.tileDownloadThreads",tileDownloadThreads)));
setTileFileSystemThreads((short)(prefs.getInt("osmdroid.tileFileSystemThreads",tileFileSystemThreads)));
setTileDownloadMaxQueueSize((short)(prefs.getInt("osmdroid.tileDownloadMaxQueueSize",tileDownloadMaxQueueSize)));
setTileFileSystemMaxQueueSize((short)(prefs.getInt("osmdroid.tileFileSystemMaxQueueSize",tileFileSystemMaxQueueSize)));
setExpirationExtendedDuration((long)prefs.getLong("osmdroid.ExpirationExtendedDuration", expirationAdder));
setMapViewRecyclerFriendly((boolean)prefs.getBoolean("osmdroid.mapViewRecycler", mapViewRecycler));
setAnimationSpeedDefault(prefs.getInt("osmdroid.ZoomSpeedDefault", animationSpeedDefault));
setAnimationSpeedShort(prefs.getInt("osmdroid.animationSpeedShort", animationSpeedShort));
if (prefs.contains("osmdroid.ExpirationOverride")) {
expirationOverride = prefs.getLong("osmdroid.ExpirationOverride",-1);
if (expirationOverride!=null && expirationOverride==-1)
expirationOverride=null;
}
}
long cacheSize=-1;
if (Build.VERSION.SDK_INT >= 9) {
//unfortunately API 8 doesn't support File.length()
            //https://github.com/osmdroid/osmdroid/issues/435
//On startup, we auto set the max cache size to be the current cache size + free disk space
//this reduces the chance of osmdroid completely filling up the storage device
//if the default max cache size is greater than the available free space
//reduce it to 95% of the available free space + the size of the cache
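            //Illustrative arithmetic only (hypothetical numbers): with a 50 MB cache.db and 400 MB free,
            //the 600 MB default exceeds 450 MB, so the max cache size is lowered to 0.95 * 450 MB = ~427 MB
            //and the trim threshold to 0.90 * 450 MB = ~405 MB.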
File dbFile = new File(getOsmdroidTileCache().getAbsolutePath() + File.separator + SqlTileWriter.DATABASE_FILENAME);
if (dbFile.exists()) {
cacheSize = dbFile.length();
long freeSpace = getOsmdroidTileCache().getFreeSpace();
//Log.i(TAG, "Current cache size is " + cacheSize + " free space is " + freeSpace);
if (getTileFileSystemCacheMaxBytes() > (freeSpace + cacheSize)){
setTileFileSystemCacheMaxBytes((long)((freeSpace + cacheSize) * 0.95));
setTileFileSystemCacheTrimBytes((long)((freeSpace + cacheSize) * 0.90));
}
} else {
//this is probably the first time running osmdroid
                long freeSpace = getOsmdroidTileCache().getFreeSpace();
                if (getTileFileSystemCacheMaxBytes() > (freeSpace)){
                    setTileFileSystemCacheMaxBytes((long)((freeSpace) * 0.95));
                    setTileFileSystemCacheTrimBytes((long)((freeSpace) * 0.90));
}
}
}
}
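    /*
     * Illustrative usage sketch (assumed typical application code, not part of this provider):
     * the load above is normally triggered once at startup through the Configuration singleton, e.g.
     *
     *   Context ctx = getApplicationContext();
     *   Configuration.getInstance().load(ctx, PreferenceManager.getDefaultSharedPreferences(ctx));
     *
     * where PreferenceManager refers to android.preference.PreferenceManager.
     */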
@Override
public void save(Context ctx, SharedPreferences prefs) {
SharedPreferences.Editor edit = prefs.edit();
edit.putString("osmdroid.basePath",getOsmdroidBasePath().getAbsolutePath());
edit.putString("osmdroid.cachePath",getOsmdroidTileCache().getAbsolutePath());
edit.putBoolean("osmdroid.DebugMode",isDebugMode());
edit.putBoolean("osmdroid.DebugDownloading",isDebugMapTileDownloader());
edit.putBoolean("osmdroid.DebugMapView",isDebugMapView());
edit.putBoolean("osmdroid.DebugTileProvider",isDebugTileProviders());
edit.putBoolean("osmdroid.HardwareAcceleration", isMapViewHardwareAccelerated());
edit.putString("osmdroid.userAgentValue", getUserAgentValue());
save(prefs, edit, mAdditionalHttpRequestProperties, "osmdroid.additionalHttpRequestProperty.");
edit.putLong("osmdroid.gpsWaitTime",gpsWaitTime);
edit.putInt("osmdroid.cacheMapTileCount", cacheMapTileCount);
edit.putInt("osmdroid.tileDownloadThreads", tileDownloadThreads);
edit.putInt("osmdroid.tileFileSystemThreads",tileFileSystemThreads);
edit.putInt("osmdroid.tileDownloadMaxQueueSize",tileDownloadMaxQueueSize);
edit.putInt("osmdroid.tileFileSystemMaxQueueSize",tileFileSystemMaxQueueSize);
edit.putLong("osmdroid.ExpirationExtendedDuration",expirationAdder);
if (expirationOverride!=null)
edit.putLong("osmdroid.ExpirationOverride",expirationOverride);
//TODO save other fields?
edit.putInt("osmdroid.ZoomSpeedDefault", animationSpeedDefault);
edit.putInt("osmdroid.animationSpeedShort", animationSpeedShort);
edit.putBoolean("osmdroid.mapViewRecycler", mapViewRecycler);
edit.commit();
}
/**
* Loading a map from preferences, using a prefix for the prefs keys
*
* @since 5.6.5
* @param pPrefs
* @param pMap
* @param pPrefix
*/
private static void load(final SharedPreferences pPrefs,
final Map<String, String> pMap, final String pPrefix) {
pMap.clear();
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pMap.put(key.substring(pPrefix.length()), pPrefs.getString(key, null));
}
}
}
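    // Illustrative mapping (hypothetical key/value): a stored preference
    //   "osmdroid.additionalHttpRequestProperty.Origin" -> "https://example.org"
    // loaded with the prefix "osmdroid.additionalHttpRequestProperty." ends up in the map as
    //   "Origin" -> "https://example.org"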
/**
* Saving a map into preferences, using a prefix for the prefs keys
*
* @since 5.6.5
* @param pPrefs
* @param pEdit
* @param pMap
* @param pPrefix
*/
private static void save(final SharedPreferences pPrefs, final SharedPreferences.Editor pEdit,
final Map<String, String> pMap, final String pPrefix) {
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pEdit.remove(key);
}
}
for (final Map.Entry<String, String> entry : pMap.entrySet()) {
final String key = pPrefix + entry.getKey();
pEdit.putString(key, entry.getValue());
}
}
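    // Conversely, saving that (hypothetical) map entry writes the preference key
    // "osmdroid.additionalHttpRequestProperty.Origin" back out, after first removing every
    // previously stored key that starts with the same prefix.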
@Override
public long getExpirationExtendedDuration() {
return expirationAdder ;
}
@Override
public void setExpirationExtendedDuration(final long period) {
if (period < 0)
expirationAdder=0;
else
expirationAdder=period;
}
@Override
public void setExpirationOverrideDuration(Long period) {
expirationOverride=period;
}
@Override
public Long getExpirationOverrideDuration() {
return expirationOverride;
}
@Override
public void setAnimationSpeedDefault(int durationsMilliseconds) {
this.animationSpeedDefault =durationsMilliseconds;
}
@Override
public int getAnimationSpeedDefault() {
return animationSpeedDefault;
}
@Override
public void setAnimationSpeedShort(int durationsMilliseconds) {
this.animationSpeedShort = durationsMilliseconds;
}
@Override
public int getAnimationSpeedShort() {
return animationSpeedShort;
}
@Override
public boolean isMapViewRecyclerFriendly() {
return mapViewRecycler;
}
@Override
public void setMapViewRecyclerFriendly(boolean enabled) {
this.mapViewRecycler=enabled;
}
}
| osmdroid-android/src/main/java/org/osmdroid/config/DefaultConfigurationProvider.java | package org.osmdroid.config;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import org.osmdroid.tileprovider.modules.SqlTileWriter;
import org.osmdroid.tileprovider.util.StorageUtils;
import java.io.File;
import java.net.Proxy;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import static org.osmdroid.tileprovider.constants.OpenStreetMapTileProviderConstants.HTTP_EXPIRES_HEADER_FORMAT;
/**
* Default configuration provider for osmdroid
* <a href="https://github.com/osmdroid/osmdroid/issues/481">Issue 481</a>
* Created on 11/29/2016.
* @author Alex O'Ree
* @see IConfigurationProvider
* @see Configuration
*/
public class DefaultConfigurationProvider implements IConfigurationProvider {
/*public final String[] preferenceKeys = new String[]{
"osmdroid.basePath",
"osmdroid.cachePath",
"osmdroid.DebugMode",
"osmdroid.DebugDownloading",
"osmdroid.DebugMapView",
"osmdroid.DebugTileProvider",
"osmdroid.HardwareAcceleration",
"osmdroid.userAgentValue",
"osmdroid.gpsWaitTime",
"osmdroid.tileDownloadThreads",
"osmdroid.tileFileSystemThreads",
"osmdroid.tileDownloadMaxQueueSize",
"osmdroid.tileFileSystemMaxQueueSize"
};*/
//<editor-fold>
protected long gpsWaitTime =20000;
protected boolean debugMode= false;
protected boolean debugMapView = false;
protected boolean debugTileProviders = false;
protected boolean debugMapTileDownloader=false;
protected boolean isMapViewHardwareAccelerated=false;
protected String userAgentValue="osmdroid";
protected String userAgentHttpHeader = "User-Agent";
private final Map<String, String> mAdditionalHttpRequestProperties = new HashMap<>();
protected short cacheMapTileCount = 9;
protected short tileDownloadThreads = 2;
protected short tileFileSystemThreads = 8;
protected short tileDownloadMaxQueueSize = 40;
protected short tileFileSystemMaxQueueSize = 40;
protected long tileFileSystemCacheMaxBytes = 600L * 1024 * 1024;
protected long tileFileSystemCacheTrimBytes = 500L * 1024 * 1024;
protected SimpleDateFormat httpHeaderDateTimeFormat = new SimpleDateFormat(HTTP_EXPIRES_HEADER_FORMAT, Locale.US);
protected File osmdroidBasePath;
protected File osmdroidTileCache;
protected long expirationAdder = 0;
protected Long expirationOverride=null;
protected Proxy httpProxy=null;
protected int animationSpeedDefault =1000;
protected int animationSpeedShort =500;
protected boolean mapViewRecycler=true;
public DefaultConfigurationProvider(){
try {
//StorageUtils.getStorage() can return null when android studio is "previewing" a layout
osmdroidBasePath = new File(StorageUtils.getStorage().getAbsolutePath(), "osmdroid");
osmdroidTileCache = new File(getOsmdroidBasePath(), "tiles");
osmdroidBasePath.mkdirs();
osmdroidTileCache.mkdirs();
}catch (Exception ex){
//IO/permissions issue
//trap for android studio layout editor and some for certain devices
//see https://github.com/osmdroid/osmdroid/issues/508
}
}
/**
* default is 20 seconds
* @return time in ms
*/
@Override
public long getGpsWaitTime() {
return gpsWaitTime;
}
@Override
public void setGpsWaitTime(long gpsWaitTime) {
this.gpsWaitTime = gpsWaitTime;
}
@Override
public boolean isDebugMode() {
return debugMode;
}
@Override
public void setDebugMode(boolean debugMode) {
this.debugMode = debugMode;
}
@Override
public boolean isDebugMapView() {
return debugMapView;
}
@Override
public void setDebugMapView(boolean debugMapView) {
this.debugMapView = debugMapView;
}
@Override
public boolean isDebugTileProviders() {
return debugTileProviders;
}
@Override
public void setDebugTileProviders(boolean debugTileProviders) {
this.debugTileProviders = debugTileProviders;
}
@Override
public boolean isDebugMapTileDownloader() {
return debugMapTileDownloader;
}
@Override
public void setDebugMapTileDownloader(boolean debugMapTileDownloader) {
this.debugMapTileDownloader = debugMapTileDownloader;
}
@Override
public boolean isMapViewHardwareAccelerated() {
return isMapViewHardwareAccelerated;
}
@Override
public void setMapViewHardwareAccelerated(boolean mapViewHardwareAccelerated) {
isMapViewHardwareAccelerated = mapViewHardwareAccelerated;
}
@Override
public String getUserAgentValue() {
return userAgentValue;
}
@Override
public void setUserAgentValue(String userAgentValue) {
this.userAgentValue = userAgentValue;
}
@Override
public Map<String, String> getAdditionalHttpRequestProperties() {
return mAdditionalHttpRequestProperties;
}
@Override
public short getCacheMapTileCount() {
return cacheMapTileCount;
}
@Override
public void setCacheMapTileCount(short cacheMapTileCount) {
this.cacheMapTileCount = cacheMapTileCount;
}
@Override
public short getTileDownloadThreads() {
return tileDownloadThreads;
}
@Override
public void setTileDownloadThreads(short tileDownloadThreads) {
this.tileDownloadThreads = tileDownloadThreads;
}
@Override
public short getTileFileSystemThreads() {
return tileFileSystemThreads;
}
@Override
public void setTileFileSystemThreads(short tileFileSystemThreads) {
this.tileFileSystemThreads = tileFileSystemThreads;
}
@Override
public short getTileDownloadMaxQueueSize() {
return tileDownloadMaxQueueSize;
}
@Override
public void setTileDownloadMaxQueueSize(short tileDownloadMaxQueueSize) {
this.tileDownloadMaxQueueSize = tileDownloadMaxQueueSize;
}
@Override
public short getTileFileSystemMaxQueueSize() {
return tileFileSystemMaxQueueSize;
}
@Override
public void setTileFileSystemMaxQueueSize(short tileFileSystemMaxQueueSize) {
this.tileFileSystemMaxQueueSize = tileFileSystemMaxQueueSize;
}
@Override
public long getTileFileSystemCacheMaxBytes() {
return tileFileSystemCacheMaxBytes;
}
@Override
public void setTileFileSystemCacheMaxBytes(long tileFileSystemCacheMaxBytes) {
this.tileFileSystemCacheMaxBytes = tileFileSystemCacheMaxBytes;
}
@Override
public long getTileFileSystemCacheTrimBytes() {
return tileFileSystemCacheTrimBytes;
}
@Override
public void setTileFileSystemCacheTrimBytes(long tileFileSystemCacheTrimBytes) {
this.tileFileSystemCacheTrimBytes = tileFileSystemCacheTrimBytes;
}
@Override
public SimpleDateFormat getHttpHeaderDateTimeFormat() {
return httpHeaderDateTimeFormat;
}
@Override
public void setHttpHeaderDateTimeFormat(SimpleDateFormat httpHeaderDateTimeFormat) {
this.httpHeaderDateTimeFormat = httpHeaderDateTimeFormat;
}
@Override
public Proxy getHttpProxy() {
return httpProxy;
}
@Override
public void setHttpProxy(Proxy httpProxy) {
this.httpProxy = httpProxy;
}
@Override
public File getOsmdroidBasePath() {
return osmdroidBasePath;
}
@Override
public void setOsmdroidBasePath(File osmdroidBasePath) {
this.osmdroidBasePath = osmdroidBasePath;
}
@Override
public File getOsmdroidTileCache() {
return osmdroidTileCache;
}
@Override
public void setOsmdroidTileCache(File osmdroidTileCache) {
this.osmdroidTileCache = osmdroidTileCache;
}
@Override
public String getUserAgentHttpHeader() {
return userAgentHttpHeader;
}
@Override
public void setUserAgentHttpHeader(String userAgentHttpHeader) {
this.userAgentHttpHeader = userAgentHttpHeader;
}
//</editor-fold>
@Override
public void load(Context ctx, SharedPreferences prefs) {
//cache management starts here
//check to see if the shared preferences is set for the tile cache
if (!prefs.contains("osmdroid.basePath")){
//this is the first time startup. run the discovery bit
File discoveredBestPath = getOsmdroidBasePath();
File discoveredCachPath = getOsmdroidTileCache();
if (!discoveredBestPath.exists() || !StorageUtils.isWritable(discoveredBestPath)) {
//this should always be writable...
discoveredCachPath=discoveredBestPath=new File("/data/data/" + ctx.getPackageName() + "/osmdroid/");
discoveredCachPath.mkdirs();
}
SharedPreferences.Editor edit = prefs.edit();
edit.putString("osmdroid.basePath",discoveredBestPath.getAbsolutePath());
edit.putString("osmdroid.cachePath",discoveredCachPath.getAbsolutePath());
edit.commit();
setOsmdroidBasePath(discoveredBestPath);
setOsmdroidTileCache(discoveredCachPath);
setUserAgentValue(ctx.getPackageName());
save(ctx,prefs);
} else {
//normal startup, load user preferences and populate the config object
setOsmdroidBasePath(new File(prefs.getString("osmdroid.basePath", getOsmdroidBasePath().getAbsolutePath())));
setOsmdroidTileCache(new File(prefs.getString("osmdroid.cachePath", getOsmdroidTileCache().getAbsolutePath())));
setDebugMode(prefs.getBoolean("osmdroid.DebugMode",false));
setDebugMapTileDownloader(prefs.getBoolean("osmdroid.DebugDownloading", false));
setDebugMapView(prefs.getBoolean("osmdroid.DebugMapView", false));
setDebugTileProviders(prefs.getBoolean("osmdroid.DebugTileProvider",false));
setMapViewHardwareAccelerated(prefs.getBoolean("osmdroid.HardwareAcceleration",false));
setUserAgentValue(prefs.getString("osmdroid.userAgentValue",ctx.getPackageName()));
load(prefs, mAdditionalHttpRequestProperties, "osmdroid.additionalHttpRequestProperty.");
setGpsWaitTime(prefs.getLong("osmdroid.gpsWaitTime", gpsWaitTime));
setTileDownloadThreads((short)(prefs.getInt("osmdroid.tileDownloadThreads",tileDownloadThreads)));
setTileFileSystemThreads((short)(prefs.getInt("osmdroid.tileFileSystemThreads",tileFileSystemThreads)));
setTileDownloadMaxQueueSize((short)(prefs.getInt("osmdroid.tileDownloadMaxQueueSize",tileDownloadMaxQueueSize)));
setTileFileSystemMaxQueueSize((short)(prefs.getInt("osmdroid.tileFileSystemMaxQueueSize",tileFileSystemMaxQueueSize)));
setExpirationExtendedDuration((long)prefs.getLong("osmdroid.ExpirationExtendedDuration", expirationAdder));
setMapViewRecyclerFriendly((boolean)prefs.getBoolean("osmdroid.mapViewRecycler", mapViewRecycler));
setAnimationSpeedDefault(prefs.getInt("osmdroid.ZoomSpeedDefault", animationSpeedDefault));
setAnimationSpeedShort(prefs.getInt("osmdroid.animationSpeedShort", animationSpeedShort));
if (prefs.contains("osmdroid.ExpirationOverride")) {
expirationOverride = prefs.getLong("osmdroid.ExpirationOverride",-1);
if (expirationOverride!=null && expirationOverride==-1)
expirationOverride=null;
}
}
long cacheSize=-1;
if (Build.VERSION.SDK_INT >= 9) {
//unfortunately API 8 doesn't support File.length()
            //https://github.com/osmdroid/osmdroid/issues/435
//On startup, we auto set the max cache size to be the current cache size + free disk space
//this reduces the chance of osmdroid completely filling up the storage device
//if the default max cache size is greater than the available free space
//reduce it to 95% of the available free space + the size of the cache
File dbFile = new File(getOsmdroidTileCache().getAbsolutePath() + File.separator + SqlTileWriter.DATABASE_FILENAME);
if (dbFile.exists()) {
cacheSize = dbFile.length();
long freeSpace = getOsmdroidTileCache().getFreeSpace();
//Log.i(TAG, "Current cache size is " + cacheSize + " free space is " + freeSpace);
if (getTileFileSystemCacheMaxBytes() > (freeSpace + cacheSize)){
setTileFileSystemCacheMaxBytes((long)((freeSpace + cacheSize) * 0.95));
setTileFileSystemCacheTrimBytes((long)((freeSpace + cacheSize) * 0.90));
}
} else {
//this is probably the first time running osmdroid
                long freeSpace = getOsmdroidTileCache().getFreeSpace();
                if (getTileFileSystemCacheMaxBytes() > (freeSpace)){
                    setTileFileSystemCacheMaxBytes((long)((freeSpace) * 0.95));
                    setTileFileSystemCacheTrimBytes((long)((freeSpace) * 0.90));
}
}
}
}
@Override
public void save(Context ctx, SharedPreferences prefs) {
SharedPreferences.Editor edit = prefs.edit();
edit.putString("osmdroid.basePath",getOsmdroidBasePath().getAbsolutePath());
edit.putString("osmdroid.cachePath",getOsmdroidTileCache().getAbsolutePath());
edit.putBoolean("osmdroid.DebugMode",isDebugMode());
edit.putBoolean("osmdroid.DebugDownloading",isDebugMapTileDownloader());
edit.putBoolean("osmdroid.DebugMapView",isDebugMapView());
edit.putBoolean("osmdroid.DebugTileProvider",isDebugTileProviders());
edit.putBoolean("osmdroid.HardwareAcceleration", isMapViewHardwareAccelerated());
edit.putString("osmdroid.userAgentValue", getUserAgentValue());
save(prefs, edit, mAdditionalHttpRequestProperties, "osmdroid.additionalHttpRequestProperty.");
edit.putLong("osmdroid.gpsWaitTime",gpsWaitTime);
edit.putInt("osmdroid.cacheMapTileCount", cacheMapTileCount);
edit.putInt("osmdroid.tileDownloadThreads", tileDownloadThreads);
edit.putInt("osmdroid.tileFileSystemThreads",tileFileSystemThreads);
edit.putInt("osmdroid.tileDownloadMaxQueueSize",tileDownloadMaxQueueSize);
edit.putInt("osmdroid.tileFileSystemMaxQueueSize",tileFileSystemMaxQueueSize);
edit.putLong("osmdroid.ExpirationExtendedDuration",expirationAdder);
if (expirationOverride!=null)
edit.putLong("osmdroid.ExpirationOverride",expirationOverride);
//TODO save other fields?
edit.putInt("osmdroid.ZoomSpeedDefault", animationSpeedDefault);
edit.putInt("osmdroid.animationSpeedShort", animationSpeedShort);
edit.putBoolean("osmdroid.mapViewRecycler", mapViewRecycler);
edit.commit();
}
/**
* Loading a map from preferences, using a prefix for the prefs keys
*
* @since 5.6.5
* @param pPrefs
* @param pMap
* @param pPrefix
*/
private static void load(final SharedPreferences pPrefs,
final Map<String, String> pMap, final String pPrefix) {
pMap.clear();
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pMap.put(key.substring(pPrefix.length()), pPrefs.getString(key, null));
}
}
}
/**
* Saving a map into preferences, using a prefix for the prefs keys
*
* @since 5.6.5
* @param pPrefs
* @param pEdit
* @param pMap
* @param pPrefix
*/
private static void save(final SharedPreferences pPrefs, final SharedPreferences.Editor pEdit,
final Map<String, String> pMap, final String pPrefix) {
for (final String key : pPrefs.getAll().keySet()) {
if (key.startsWith(pPrefix)) {
pEdit.remove(key);
}
}
for (final Map.Entry<String, String> entry : pMap.entrySet()) {
final String key = pPrefix + entry.getKey();
pEdit.putString(key, entry.getValue());
}
}
@Override
public long getExpirationExtendedDuration() {
return expirationAdder ;
}
@Override
public void setExpirationExtendedDuration(final long period) {
if (period < 0)
expirationAdder=0;
else
expirationAdder=period;
}
@Override
public void setExpirationOverrideDuration(Long period) {
expirationOverride=period;
}
@Override
public Long getExpirationOverrideDuration() {
return expirationOverride;
}
@Override
public void setAnimationSpeedDefault(int durationsMilliseconds) {
this.animationSpeedDefault =durationsMilliseconds;
}
@Override
public int getAnimationSpeedDefault() {
return animationSpeedDefault;
}
@Override
public void setAnimationSpeedShort(int durationsMilliseconds) {
this.animationSpeedShort = durationsMilliseconds;
}
@Override
public int getAnimationSpeedShort() {
return animationSpeedShort;
}
@Override
public boolean isMapViewRecyclerFriendly() {
return mapViewRecycler;
}
@Override
public void setMapViewRecyclerFriendly(boolean enabled) {
this.mapViewRecycler=enabled;
}
}
| #720 possible fix for preventing the auto creation of the "osmdroid" folder
| osmdroid-android/src/main/java/org/osmdroid/config/DefaultConfigurationProvider.java | #720 possible fix for preventing the auto creation of the "osmdroid" folder | <ide><path>smdroid-android/src/main/java/org/osmdroid/config/DefaultConfigurationProvider.java
<ide> import android.content.Context;
<ide> import android.content.SharedPreferences;
<ide> import android.os.Build;
<del>
<add>import android.util.Log;
<add>
<add>import org.osmdroid.api.IMapView;
<ide> import org.osmdroid.tileprovider.modules.SqlTileWriter;
<ide> import org.osmdroid.tileprovider.util.StorageUtils;
<ide>
<ide> */
<ide> public class DefaultConfigurationProvider implements IConfigurationProvider {
<ide>
<del> /*public final String[] preferenceKeys = new String[]{
<del> "osmdroid.basePath",
<del> "osmdroid.cachePath",
<del> "osmdroid.DebugMode",
<del> "osmdroid.DebugDownloading",
<del> "osmdroid.DebugMapView",
<del> "osmdroid.DebugTileProvider",
<del> "osmdroid.HardwareAcceleration",
<del> "osmdroid.userAgentValue",
<del> "osmdroid.gpsWaitTime",
<del> "osmdroid.tileDownloadThreads",
<del> "osmdroid.tileFileSystemThreads",
<del> "osmdroid.tileDownloadMaxQueueSize",
<del> "osmdroid.tileFileSystemMaxQueueSize"
<del> };*/
<del> //<editor-fold>
<add>
<ide> protected long gpsWaitTime =20000;
<ide> protected boolean debugMode= false;
<ide> protected boolean debugMapView = false;
<ide>
<ide> public DefaultConfigurationProvider(){
<ide>
<del> try {
<del> //StorageUtils.getStorage() can return null when android studio is "previewing" a layout
<del> osmdroidBasePath = new File(StorageUtils.getStorage().getAbsolutePath(), "osmdroid");
<del> osmdroidTileCache = new File(getOsmdroidBasePath(), "tiles");
<del> osmdroidBasePath.mkdirs();
<del> osmdroidTileCache.mkdirs();
<del> }catch (Exception ex){
<del> //IO/permissions issue
<del> //trap for android studio layout editor and some for certain devices
<del> //see https://github.com/osmdroid/osmdroid/issues/508
<del> }
<add>
<ide> }
<ide> /**
<ide> * default is 20 seconds
<ide>
<ide> @Override
<ide> public File getOsmdroidBasePath() {
<add> if (osmdroidBasePath==null)
<add> osmdroidBasePath = new File(StorageUtils.getStorage().getAbsolutePath(), "osmdroid");
<add> try {
<add> osmdroidBasePath.mkdirs();
<add> }catch (Exception ex){
<add> Log.d(IMapView.LOGTAG, "Unable to create base path at " + osmdroidBasePath.getAbsolutePath(), ex);
<add> //IO/permissions issue
<add> //trap for android studio layout editor and some for certain devices
<add> //see https://github.com/osmdroid/osmdroid/issues/508
<add> }
<ide> return osmdroidBasePath;
<ide> }
<ide>
<ide>
<ide> @Override
<ide> public File getOsmdroidTileCache() {
<add> if (osmdroidTileCache==null)
<add> osmdroidTileCache = new File(getOsmdroidBasePath(), "tiles");
<add> try {
<add> osmdroidTileCache.mkdirs();
<add> }catch (Exception ex){
<add> Log.d(IMapView.LOGTAG, "Unable to create tile cache path at " + osmdroidTileCache.getAbsolutePath(), ex);
<add> //IO/permissions issue
<add> //trap for android studio layout editor and some for certain devices
<add> //see https://github.com/osmdroid/osmdroid/issues/508
<add> }
<ide> return osmdroidTileCache;
<ide> }
<ide> |
|
Java | apache-2.0 | c7dcd1eb45ea1d01a7c7cb8a26485bff0bd5faac | 0 | heriram/incubator-asterixdb,waans11/incubator-asterixdb,waans11/incubator-asterixdb,heriram/incubator-asterixdb,parshimers/incubator-asterixdb-hyracks,kisskys/incubator-asterixdb,ilovesoup/hyracks,ecarm002/incubator-asterixdb,ty1er/incubator-asterixdb-hyracks,sjaco002/incubator-asterixdb-hyracks,kisskys/incubator-asterixdb,apache/incubator-asterixdb,kisskys/incubator-asterixdb-hyracks,ty1er/incubator-asterixdb,lwhay/hyracks,waans11/incubator-asterixdb,tectronics/hyracks,ty1er/incubator-asterixdb,kisskys/incubator-asterixdb-hyracks,parshimers/incubator-asterixdb-hyracks,amoudi87/hyracks,ecarm002/incubator-asterixdb,tectronics/hyracks,heriram/incubator-asterixdb,ecarm002/incubator-asterixdb,waans11/incubator-asterixdb,apache/incubator-asterixdb,apache/incubator-asterixdb,ecarm002/incubator-asterixdb,parshimers/incubator-asterixdb-hyracks,parshimers/incubator-asterixdb-hyracks,ecarm002/incubator-asterixdb,ilovesoup/hyracks,ty1er/incubator-asterixdb-hyracks,ilovesoup/hyracks,kisskys/incubator-asterixdb,ecarm002/incubator-asterixdb,waans11/incubator-asterixdb-hyracks,apache/incubator-asterixdb,heriram/incubator-asterixdb,ty1er/incubator-asterixdb,kisskys/incubator-asterixdb,ilovesoup/hyracks,amoudi87/hyracks,apache/incubator-asterixdb,sjaco002/incubator-asterixdb-hyracks,ty1er/incubator-asterixdb,heriram/incubator-asterixdb,kisskys/incubator-asterixdb-hyracks,waans11/incubator-asterixdb,lwhay/hyracks,waans11/incubator-asterixdb-hyracks,ty1er/incubator-asterixdb,tectronics/hyracks,waans11/incubator-asterixdb,sjaco002/incubator-asterixdb-hyracks,kisskys/incubator-asterixdb,apache/incubator-asterixdb,ty1er/incubator-asterixdb-hyracks,apache/incubator-asterixdb,sjaco002/incubator-asterixdb-hyracks,ty1er/incubator-asterixdb-hyracks,heriram/incubator-asterixdb,kisskys/incubator-asterixdb,waans11/incubator-asterixdb-hyracks,heriram/incubator-asterixdb,amoudi87/hyracks,waans11/incubator-asterixdb-hyracks,kisskys/incubator-asterixdb-hyracks,kisskys/incubator-asterixdb,ty1er/incubator-asterixdb,ecarm002/incubator-asterixdb,tectronics/hyracks,lwhay/hyracks,waans11/incubator-asterixdb,amoudi87/hyracks,lwhay/hyracks | /*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.tests.am.btree;
import java.io.DataOutput;
import java.io.File;
import org.junit.After;
import org.junit.Before;
import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
public abstract class AbstractBTreeOperatorTest extends AbstractIntegrationTest {
static {
TestStorageManagerComponentHolder.init(8192, 20, 20);
}
protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
protected final IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
protected IIndexDataflowHelperFactory dataflowHelperFactory;
// field, type and key declarations for primary index
protected final int primaryFieldCount = 6;
protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
protected final int primaryKeyFieldCount = 1;
protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
// to be set by subclasses
protected String primaryFileName;
protected IFileSplitProvider primarySplitProvider;
// field, type and key declarations for secondary indexes
protected final int secondaryFieldCount = 2;
protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
protected final int secondaryKeyFieldCount = 2;
protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
protected String secondaryFileName;
protected IFileSplitProvider secondarySplitProvider;
protected ITreeIndexOperatorTestHelper testHelper;
protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
return new BTreeOperatorTestHelper();
}
@Before
public void setup() throws Exception {
testHelper = createTestHelper();
dataflowHelperFactory = createDataFlowHelperFactory();
primaryFileName = testHelper.getPrimaryIndexName();
primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
new File(primaryFileName))) });
secondaryFileName = testHelper.getSecondaryIndexName();
secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
new FileReference(new File(secondaryFileName))) });
// field, type and key declarations for primary index
primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
// field, type and key declarations for secondary indexes
secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
}
protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory();
protected void loadPrimaryIndex() throws Exception {
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
"data/tpch0.001/orders-part1.tbl"))) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
spec.addRoot(primaryBtreeBulkLoad);
runTest(spec);
}
protected void loadSecondaryIndex() throws Exception {
JobSpecification spec = new JobSpecification();
// build dummy tuple containing nothing
ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
DataOutput dos = tb.getDataOutput();
tb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
tb.addFieldEndOffset();
ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE };
RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
int[] lowKeyFields = null; // - infinity
int[] highKeyFields = null; // + infinity
// scan primary index
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
// sort based on secondary keys
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
primaryRecDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
// load secondary index
int[] fieldPermutation = { 3, 0 };
TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
secondaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
spec.addRoot(secondaryBtreeBulkLoad);
runTest(spec);
}
protected void insertPipeline(boolean useUpsert) throws Exception {
IndexOp pipelineOperation = useUpsert ? IndexOp.UPSERT : IndexOp.INSERT;
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
"data/tpch0.001/orders-part2.tbl"))) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
// insert into primary index
int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, ordersDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, primaryFieldPermutation, pipelineOperation, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeInsertOp, NC1_ID);
// first secondary index
int[] fieldPermutationB = { 4, 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, ordersDesc, storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
secondaryComparatorFactories, fieldPermutationB, pipelineOperation, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryBtreeInsertOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeInsertOp, 0, secondaryInsertOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
spec.addRoot(nullSink);
runTest(spec);
}
@After
public void cleanup() throws Exception {
testHelper.cleanup(primaryFileName, secondaryFileName);
}
} | hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java | /*
* Copyright 2009-2010 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.tests.am.btree;
import java.io.DataOutput;
import java.io.File;
import org.junit.After;
import org.junit.Before;
import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
public abstract class AbstractBTreeOperatorTest extends AbstractIntegrationTest {
static {
TestStorageManagerComponentHolder.init(8192, 20, 20);
}
protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
protected final IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
protected IIndexDataflowHelperFactory dataflowHelperFactory;
// field, type and key declarations for primary index
protected final int primaryFieldCount = 6;
protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
protected final int primaryKeyFieldCount = 1;
protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
// to be set by subclasses
protected String primaryFileName;
protected IFileSplitProvider primarySplitProvider;
// field, type and key declarations for secondary indexes
protected final int secondaryFieldCount = 2;
protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
protected final int secondaryKeyFieldCount = 2;
protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
protected String secondaryFileName;
protected IFileSplitProvider secondarySplitProvider;
protected ITreeIndexOperatorTestHelper testHelper;
protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
return new BTreeOperatorTestHelper();
}
@Before
public void setup() throws Exception {
testHelper = createTestHelper();
dataflowHelperFactory = createDataFlowHelperFactory();
primaryFileName = testHelper.getPrimaryIndexName();
primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
new File(primaryFileName))) });
secondaryFileName = testHelper.getSecondaryIndexName();
secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
new FileReference(new File(secondaryFileName))) });
// field, type and key declarations for primary index
primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
// field, type and key declarations for secondary indexes
secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
}
protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory();
protected void loadPrimaryIndex() throws Exception {
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
"data/tpch0.001/orders-part1.tbl"))) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
spec.addRoot(primaryBtreeBulkLoad);
runTest(spec);
}
protected void loadSecondaryIndex() throws Exception {
JobSpecification spec = new JobSpecification();
// build dummy tuple containing nothing
ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
DataOutput dos = tb.getDataOutput();
tb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
tb.addFieldEndOffset();
ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE };
RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
int[] lowKeyFields = null; // - infinity
int[] highKeyFields = null; // + infinity
// scan primary index
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
// sort based on secondary keys
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
primaryRecDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
// load secondary index
int[] fieldPermutation = { 3, 0 };
TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
secondaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
spec.addRoot(secondaryBtreeBulkLoad);
runTest(spec);
}
protected void insertPipeline(boolean useUpsert) throws Exception {
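        // pipeline: scan orders-part2.tbl, then insert/upsert each tuple into the primary and secondary indexes, ending in a null sink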
IndexOp pipelineOperation = useUpsert ? IndexOp.INSERT : IndexOp.UPSERT;
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
"data/tpch0.001/orders-part2.tbl"))) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
// insert into primary index
int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, ordersDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
primaryComparatorFactories, primaryFieldPermutation, pipelineOperation, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeInsertOp, NC1_ID);
// first secondary index
int[] fieldPermutationB = { 4, 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, ordersDesc, storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
secondaryComparatorFactories, fieldPermutationB, pipelineOperation, dataflowHelperFactory,
NoOpOperationCallbackProvider.INSTANCE);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryBtreeInsertOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeInsertOp, 0, secondaryInsertOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
spec.addRoot(nullSink);
runTest(spec);
}
@After
public void cleanup() throws Exception {
testHelper.cleanup(primaryFileName, secondaryFileName);
}
} | Fixed bug in integration test where wrong operation was being chosen for certain tests
git-svn-id: 380da7c1207d3117f55b64aa1fd67529f17f3188@1325 123451ca-8445-de46-9d55-352943316053
| hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java | Fixed bug in integration test where wrong operation was being chosen for certain tests | <ide><path>yracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
<ide> }
<ide>
<ide> protected void insertPipeline(boolean useUpsert) throws Exception {
<del> IndexOp pipelineOperation = useUpsert ? IndexOp.INSERT : IndexOp.UPSERT;
<add> IndexOp pipelineOperation = useUpsert ? IndexOp.UPSERT : IndexOp.INSERT;
<ide> JobSpecification spec = new JobSpecification();
<ide>
<ide> FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File( |
|
Java | mit | 5468d720f8310614d378c556128ca7b6da6644b2 | 0 | Ronneesley/redesocial,Ronneesley/redesocial,Ronneesley/redesocial,Ronneesley/redesocial,Ronneesley/redesocial,Ronneesley/redesocial | package br.com.redesocial.modelo.dao;
import br.com.redesocial.modelo.bo.Categoria;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.List;
/**
* Classe que realiza as operações de acesso ao banco de dados da entidade categoria
* @author Lucas Pereira de Azevedo
* @since 05/09/2017
*/
public class CategoriaDAO extends DAOCRUDBase<Categoria> {
/**
* Método responsável pela inserção de um categoria no banco de dados
* @author Lucas Azevedo
* @param c Categoria a ser inserido
* @throws Exception possíveis exceções que podem acontecer
*/
@Override
public void inserir(Categoria c) throws Exception {
Connection conexao = getConexao();
if(c.getDescricao().equals("")){
throw new Exception("O campo categoria não pode estar vazio!");
}
PreparedStatement pstmt;
pstmt = conexao.prepareStatement("insert into categoria (descricao) values(?)", Statement.RETURN_GENERATED_KEYS);
pstmt.setString(1, (String) c.getDescricao());
pstmt.executeUpdate();
c.setId(getId(pstmt));
}
@Override
public void alterar(Categoria dto) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Categoria selecionar(int id) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public List listar() throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void excluir(int id) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
} | codigo/RedeSocial/src/br/com/redesocial/modelo/dao/CategoriaDAO.java | package br.com.redesocial.modelo.dao;
import br.com.redesocial.modelo.bo.Categoria;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.List;
/**
* Classe que realiza as operações de acesso ao banco de dados da entidade categoria
* @author Lucas Azevedo
* @since 05/09/2017
*/
public class CategoriaDAO extends DAOCRUDBase<Categoria> {
/**
* Método responsável pela inserção de um categoria no banco de dados
* @author Lucas Azevedo
* @param c Categoria a ser inserido
* @throws Exception possíveis exceções que podem acontecer
*/
@Override
public void inserir(Categoria c) throws Exception {
Connection conexao = getConexao();
if(c.getDescricao().equals("")){
throw new Exception("O campo categoria não pode estar vazio!");
}
PreparedStatement pstmt;
pstmt = conexao.prepareStatement("insert into categoria (descricao) values(?)", Statement.RETURN_GENERATED_KEYS);
pstmt.setString(1, (String) c.getDescricao());
pstmt.executeUpdate();
c.setId(getId(pstmt));
}
@Override
public void alterar(Categoria dto) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public Categoria selecionar(int id) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public List listar() throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
@Override
public void excluir(int id) throws Exception {
throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
}
} | Metodo inserir CAtegoriaDAO
| codigo/RedeSocial/src/br/com/redesocial/modelo/dao/CategoriaDAO.java | Metodo inserir CAtegoriaDAO | <ide><path>odigo/RedeSocial/src/br/com/redesocial/modelo/dao/CategoriaDAO.java
<ide>
<ide> /**
<ide> * Classe que realiza as operações de acesso ao banco de dados da entidade categoria
<del> * @author Lucas Azevedo
<add> * @author Lucas Pereira de Azevedo
<ide> * @since 05/09/2017
<ide> */
<ide> public class CategoriaDAO extends DAOCRUDBase<Categoria> { |
|
Java | apache-2.0 | 8a1d10e6132e5231a0969c31e1fcc80041ec062f | 0 | flexiblepower/fpai-apps,flexiblepower/fpai-apps,flexiblepower/fpai-apps,flexiblepower/fpai-apps,Kisensum/fpai-apps,Kisensum/fpai-apps,Kisensum/fpai-apps,Kisensum/fpai-apps,flexiblepower/fpai-apps,Kisensum/fpai-apps | package net.powermatcher.fpai.peakshaving;
import net.powermatcher.core.concentrator.TransformingConcentrator;
import org.flexiblepower.observation.Observation;
import org.flexiblepower.observation.ObservationConsumer;
import org.flexiblepower.observation.ObservationProvider;
import org.flexiblepower.ral.drivers.uncontrolled.PowerState;
import aQute.bnd.annotation.component.Component;
import aQute.bnd.annotation.component.Reference;
import aQute.bnd.annotation.metatype.Meta;
@Component(immediate = true, designateFactory = TransformingConcentratorInformer.Config.class)
public class TransformingConcentratorInformer implements ObservationConsumer<PowerState> {
public interface Config {
@Meta.AD(deflt = "(agentId=peakshavingconcentrator)",
description = "The filter that is used to determine which transforming concentrator should get the power values")
String
concentrator_target();
@Meta.AD(deflt = "(org.flexiblepower.monitoring.observationOf=something)",
description = "The filter that is used to determine which observation provider should be used to get the power values")
String
observationProvider_target();
}
private TransformingConcentrator concentrator;
@Reference
public void setConcentrator(TransformingConcentrator concentrator) {
this.concentrator = concentrator;
}
@Reference
public void setObservationProvider(ObservationProvider<PowerState> provider) {
provider.subscribe(this);
}
public void unsetObservationProvider(ObservationProvider<PowerState> provider) {
provider.unsubscribe(this);
}
@Override
public void consume(ObservationProvider<? extends PowerState> source,
Observation<? extends PowerState> observation) {
concentrator.setMeasuredFlow(observation.getValue().getCurrentUsage());
}
}
| net.powermatcher.fpai.controller/src/net/powermatcher/fpai/peakshaving/TransformingConcentratorInformer.java | package net.powermatcher.fpai.peakshaving;
import net.powermatcher.core.concentrator.TransformingConcentrator;
import org.flexiblepower.observation.Observation;
import org.flexiblepower.observation.ObservationConsumer;
import org.flexiblepower.observation.ObservationProvider;
import org.flexiblepower.ral.drivers.uncontrolled.PowerState;
import aQute.bnd.annotation.component.Component;
import aQute.bnd.annotation.component.Reference;
import aQute.bnd.annotation.metatype.Meta;
@Component(immediate = true, designateFactory = TransformingConcentratorInformer.Config.class)
public class TransformingConcentratorInformer implements ObservationConsumer<PowerState> {
public interface Config {
@Meta.AD(deflt = "(agentId=peakshavingconcentrator)",
description = "The filter that is used to determine which transforming concentrator should get the power values")
String
concentrator_filter();
@Meta.AD(deflt = "(org.flexiblepower.monitoring.observationOf=something)",
description = "The filter that is used to determine which observation provider should be used to get the power values")
String
observationProvider_filter();
}
private TransformingConcentrator concentrator;
@Reference
public void setConcentrator(TransformingConcentrator concentrator) {
this.concentrator = concentrator;
}
@Reference
public void setObservationProvider(ObservationProvider<PowerState> provider) {
provider.subscribe(this);
}
public void unsetObservationProvider(ObservationProvider<PowerState> provider) {
provider.unsubscribe(this);
}
@Override
public void consume(ObservationProvider<? extends PowerState> source,
Observation<? extends PowerState> observation) {
concentrator.setMeasuredFlow(observation.getValue().getCurrentUsage());
}
}
| Fixed the name of the filter targets
| net.powermatcher.fpai.controller/src/net/powermatcher/fpai/peakshaving/TransformingConcentratorInformer.java | Fixed the name of the filter targets | <ide><path>et.powermatcher.fpai.controller/src/net/powermatcher/fpai/peakshaving/TransformingConcentratorInformer.java
<ide> @Meta.AD(deflt = "(agentId=peakshavingconcentrator)",
<ide> description = "The filter that is used to determine which transforming concentrator should get the power values")
<ide> String
<del> concentrator_filter();
<add> concentrator_target();
<ide>
<ide> @Meta.AD(deflt = "(org.flexiblepower.monitoring.observationOf=something)",
<ide> description = "The filter that is used to determine which observation provider should be used to get the power values")
<ide> String
<del> observationProvider_filter();
<add> observationProvider_target();
<ide> }
<ide>
<ide> private TransformingConcentrator concentrator; |
|
Java | apache-2.0 | 033ccf30fe1d17324bcd33ed3f8346b2d6fb8c11 | 0 | b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl | /*
* Copyright 2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.core.uri;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import com.b2international.commons.exceptions.BadRequestException;
/**
* @since 7.12.0
*/
public class ComponentURITest {
@Test
public void constructorTest() {
final String branchPath = "SNOMEDCT/2019-09-30/SNOMEDCT-SE/2020-07-30/SNOMEDCT-EXT";
final CodeSystemURI uri = new CodeSystemURI(branchPath);
short terminologyComponentId = 150;
final String componentId = "123456789";
ComponentURI componentURI = ComponentURI.of(uri, terminologyComponentId, componentId);
assertEquals(componentURI.codeSystem(), ComponentURI.SLASH_SPLITTER.split(branchPath).iterator().next());
assertEquals(componentURI.terminologyComponentId(), terminologyComponentId);
assertEquals(componentURI.identifier(), componentId);
}
@Test
public void serializationTest() {
final String uri = "LCS1/1542/750/SO";
ComponentURI componentURI = ComponentURI.of(uri);
assertEquals(componentURI.codeSystem(), "LCS1");
assertEquals(componentURI.terminologyComponentId(), 750);
assertEquals(componentURI.identifier(), "SO");
}
@Test(expected = IllegalArgumentException.class)
public void numberOfPartsTest() {
final String incompleteURI = "LCS1/1542";
ComponentURI.of(incompleteURI); //Attempt to parse incomplete component URI
}
@Test(expected = BadRequestException.class)
public void missingCodeSystemTest() {
final String malformedURI = "/750/1542";
ComponentURI.of(malformedURI);
}
@Test(expected = NumberFormatException.class)
public void incorrectTerminologyComponentIdTest() {
final String uri = "LCS1/xyz/1542";
ComponentURI.of(uri);
}
}
| core/com.b2international.snowowl.core.tests/src/com/b2international/snowowl/core/uri/ComponentURITest.java | /*
* Copyright 2020 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.core.uri;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
* @since 7.12.0
*/
public class ComponentURITest {
@Test
public void constructorTest() {
final String branchPath = "SNOMEDCT/2019-09-30/SNOMEDCT-SE/2020-07-30/SNOMEDCT-EXT";
final CodeSystemURI uri = new CodeSystemURI(branchPath);
short terminologyComponentId = 150;
final String componentId = "123456789";
ComponentURI componentURI = ComponentURI.of(uri, terminologyComponentId, componentId);
assertEquals(componentURI.codeSystem(), ComponentURI.SLASH_SPLITTER.split(branchPath).iterator().next());
assertEquals(componentURI.terminologyComponentId(), terminologyComponentId);
assertEquals(componentURI.identifier(), componentId);
}
@Test
public void serializationTest() {
final String uri = "LCS1/1542/750/SO";
ComponentURI componentURI = ComponentURI.of(uri);
assertEquals(componentURI.codeSystem(), "LCS1");
assertEquals(componentURI.terminologyComponentId(), "750");
assertEquals(componentURI.identifier(), "SO");
}
}
| SO-4285: Add further componentURI parse tests | core/com.b2international.snowowl.core.tests/src/com/b2international/snowowl/core/uri/ComponentURITest.java | SO-4285: Add further componentURI parse tests | <ide><path>ore/com.b2international.snowowl.core.tests/src/com/b2international/snowowl/core/uri/ComponentURITest.java
<ide> import static org.junit.Assert.assertEquals;
<ide>
<ide> import org.junit.Test;
<add>
<add>import com.b2international.commons.exceptions.BadRequestException;
<ide>
<ide> /**
<ide> * @since 7.12.0
<ide> final String uri = "LCS1/1542/750/SO";
<ide> ComponentURI componentURI = ComponentURI.of(uri);
<ide> assertEquals(componentURI.codeSystem(), "LCS1");
<del> assertEquals(componentURI.terminologyComponentId(), "750");
<add> assertEquals(componentURI.terminologyComponentId(), 750);
<ide> assertEquals(componentURI.identifier(), "SO");
<ide> }
<ide>
<add> @Test(expected = IllegalArgumentException.class)
<add> public void numberOfPartsTest() {
<add> final String incompleteURI = "LCS1/1542";
<add> ComponentURI.of(incompleteURI); //Attempt to parse incomplete component URI
<add> }
<add>
<add> @Test(expected = BadRequestException.class)
<add> public void missingCodeSystemTest() {
<add> final String malformedURI = "/750/1542";
<add> ComponentURI.of(malformedURI);
<add> }
<add>
<add> @Test(expected = NumberFormatException.class)
<add> public void incorrectTerminologyComponentIdTest() {
<add> final String uri = "LCS1/xyz/1542";
<add> ComponentURI.of(uri);
<add> }
<add>
<ide> } |
|
Java | apache-2.0 | error: pathspec 'src/org/ensembl/healthcheck/testcase/generic/ComparePreviousDatabases.java' did not match any file(s) known to git
| 3b45553621791c366e67c935d923c6809e295592 | 1 | thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck | /*
* Copyright (C) 2003 EBI, GRL
*
* This library is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free Software
* Foundation; either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with
* this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place,
* Suite 330, Boston, MA 02111-1307 USA
*/
package org.ensembl.healthcheck.testcase.generic;
import java.util.Map;
import java.util.Set;
import org.ensembl.healthcheck.DatabaseRegistry;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Species;
import org.ensembl.healthcheck.testcase.MultiDatabaseTestCase;
import org.ensembl.healthcheck.util.DBUtils;
/**
* Check that all species and database types in the previous release are represented in the current release.
*/
public class ComparePreviousDatabases extends MultiDatabaseTestCase {
/**
* Create a new instance of ComparePreviousDatabases
*/
public ComparePreviousDatabases() {
addToGroup("release");
setDescription("Check that all species and database types in the previous release are represented in the current release.");
}
/**
* Check various aspects of the meta table.
*
* @param dbr
* The database registry containing all the specified databases.
* @return True if the meta information is consistent within species.
*/
public boolean run(DatabaseRegistry dbr) {
boolean result = true;
// look at all databases on the secondary server, check that we have an equivalent
DatabaseRegistry secondaryDBR = DBUtils.getSecondaryDatabaseRegistry();
// get the map of species, with associated set of types, for both primary and secondary servers
Map<Species, Set<DatabaseType>> primarySpeciesAndTypes = dbr.getSpeciesTypeMap();
Map<Species, Set<DatabaseType>> secondarySpeciesAndTypes = secondaryDBR.getSpeciesTypeMap();
for (Species s : secondarySpeciesAndTypes.keySet()) {
if (s.equals(Species.UNKNOWN) || s.equals(Species.ANCESTRAL_SEQUENCES)) {
continue;
}
// fail at once if there are no databases on the main server for this species at all
if (!primarySpeciesAndTypes.containsKey(s)) {
ReportManager.problem(this, "", String.format("Secondary server contains at least one database for %s (e.g. %s) but there are none on the primary server", s, (secondaryDBR.getAll(s))[0].getName()));
result = false;
} else {
// now check by type
for (DatabaseType t : secondarySpeciesAndTypes.get(s)) {
Set<DatabaseType> primaryTypes = primarySpeciesAndTypes.get(s);
if (!primaryTypes.contains(t)) {
ReportManager.problem(this, "", String.format("Secondary server has a %s database for %s but there is no equivalent on the primary server", t, s));
result = false;
}
}
}
}
return result;
}
} // ComparePreviousDatabases
| src/org/ensembl/healthcheck/testcase/generic/ComparePreviousDatabases.java | Check that all species and database types in the previous release are represented in the current release.
| src/org/ensembl/healthcheck/testcase/generic/ComparePreviousDatabases.java | Check that all species and database types in the previous release are represented in the current release. | <ide><path>rc/org/ensembl/healthcheck/testcase/generic/ComparePreviousDatabases.java
<add>/*
<add> * Copyright (C) 2003 EBI, GRL
<add> *
<add> * This library is free software; you can redistribute it and/or modify it under the
<add> * terms of the GNU Lesser General Public License as published by the Free Software
<add> * Foundation; either version 2.1 of the License, or (at your option) any later version.
<add> *
<add> * This library is distributed in the hope that it will be useful, but WITHOUT ANY
<add> * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
<add> * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
<add> *
<add> * You should have received a copy of the GNU Lesser General Public License along with
<add> * this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place,
<add> * Suite 330, Boston, MA 02111-1307 USA
<add> */
<add>
<add>package org.ensembl.healthcheck.testcase.generic;
<add>
<add>import java.util.Map;
<add>import java.util.Set;
<add>
<add>import org.ensembl.healthcheck.DatabaseRegistry;
<add>import org.ensembl.healthcheck.DatabaseRegistryEntry;
<add>import org.ensembl.healthcheck.DatabaseType;
<add>import org.ensembl.healthcheck.ReportManager;
<add>import org.ensembl.healthcheck.Species;
<add>import org.ensembl.healthcheck.testcase.MultiDatabaseTestCase;
<add>import org.ensembl.healthcheck.util.DBUtils;
<add>
<add>/**
<add> * Check that all species and database types in the previous release are represented in the current release.
<add> */
<add>
<add>public class ComparePreviousDatabases extends MultiDatabaseTestCase {
<add>
<add> /**
<add> * Create a new instance of ComparePreviousDatabases
<add> */
<add> public ComparePreviousDatabases() {
<add> addToGroup("release");
<add> setDescription("Check that all species and database types in the previous release are represented in the current release.");
<add> }
<add>
<add> /**
<add> * Check various aspects of the meta table.
<add> *
<add> * @param dbr
<add> * The database registry containing all the specified databases.
<add> * @return True if the meta information is consistent within species.
<add> */
<add> public boolean run(DatabaseRegistry dbr) {
<add>
<add> boolean result = true;
<add>
<add> // look at all databases on the secondary server, check that we have an equivalent
<add> DatabaseRegistry secondaryDBR = DBUtils.getSecondaryDatabaseRegistry();
<add>
<add> // get the map of species, with associated set of types, for both primary and secondary servers
<add> Map<Species, Set<DatabaseType>> primarySpeciesAndTypes = dbr.getSpeciesTypeMap();
<add> Map<Species, Set<DatabaseType>> secondarySpeciesAndTypes = secondaryDBR.getSpeciesTypeMap();
<add>
<add> for (Species s : secondarySpeciesAndTypes.keySet()) {
<add>
<add> if (s.equals(Species.UNKNOWN) || s.equals(Species.ANCESTRAL_SEQUENCES)) {
<add> continue;
<add> }
<add>
<add> // fail at once if there are no databases on the main server for this species at all
<add> if (!primarySpeciesAndTypes.containsKey(s)) {
<add>
<add> ReportManager.problem(this, "", String.format("Secondary server contains at least one database for %s (e.g. %s) but there are none on the primary server", s, (secondaryDBR.getAll(s))[0].getName()));
<add> result = false;
<add>
<add> } else {
<add>
<add> // now check by type
<add> for (DatabaseType t : secondarySpeciesAndTypes.get(s)) {
<add>
<add> Set<DatabaseType> primaryTypes = primarySpeciesAndTypes.get(s);
<add>
<add> if (!primaryTypes.contains(t)) {
<add>
<add> ReportManager.problem(this, "", String.format("Secondary server has a %s database for %s but there is no equivalent on the primary server", t, s));
<add> result = false;
<add>
<add> }
<add> }
<add>
<add> }
<add>
<add> }
<add>
<add> return result;
<add>
<add> }
<add>
<add>} // ComparePreviousDatabases |
|
Java | apache-2.0 | 9c579fca20d611a0c7fa166957ee69f5e7574eb3 | 0 | Doctoror/PainlessMusicPlayer,Doctoror/FuckOffMusicPlayer,Doctoror/PainlessMusicPlayer,Doctoror/FuckOffMusicPlayer,Doctoror/PainlessMusicPlayer | /*
* Copyright (C) 2016 Yaroslav Mytkalyk
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.doctoror.fuckoffmusicplayer;
import com.doctoror.fuckoffmusicplayer.settings.Theme;
import com.f2prateek.dart.InjectExtra;
import android.content.ComponentName;
import android.content.Intent;
import android.media.AudioManager;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.MenuItem;
public abstract class BaseActivity extends AppCompatActivity {
private Theme mTheme;
@Theme.ThemeType
private int mThemeUsed;
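    /** True while fragment transactions are safe to commit; set in onCreate/onStart/onResume and cleared in onSaveInstanceState. */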
private boolean mFragmentTransactionsAllowed;
@Override
protected void onCreate(@Nullable final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setVolumeControlStream(AudioManager.STREAM_MUSIC);
mTheme = Theme.getInstance(this);
mThemeUsed = mTheme.getThemeType();
mFragmentTransactionsAllowed = true;
}
protected final Theme getTheme1() {
return mTheme;
}
protected final void restart() {
final Intent intent = new Intent(this, getClass());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
finish();
startActivity(intent);
overridePendingTransition(0, 0);
}
@Override
protected void onStart() {
super.onStart();
mFragmentTransactionsAllowed = true;
// Theme changed while this Activity was in background
if (mThemeUsed != mTheme.getThemeType()) {
restart();
}
}
@Override
protected void onResume() {
super.onResume();
mFragmentTransactionsAllowed = true;
}
@Override
protected void onSaveInstanceState(final Bundle outState) {
super.onSaveInstanceState(outState);
mFragmentTransactionsAllowed = false;
}
protected final boolean areFragmentTransactionsAllowed() {
return mFragmentTransactionsAllowed;
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
final Intent parent = getParentActivityIntent();
if (parent != null) {
navigateUpTo(getParentActivityIntent());
} else {
finish();
}
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Nullable
@Override
public Intent getParentActivityIntent() {
final Intent intent = super.getParentActivityIntent();
if (intent != null) {
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
}
return intent;
}
@Override
public boolean navigateUpTo(final Intent upIntent) {
ComponentName destInfo = upIntent.getComponent();
if (destInfo == null) {
destInfo = upIntent.resolveActivity(getPackageManager());
if (destInfo == null) {
return false;
}
}
startActivity(upIntent);
finish();
return true;
}
}
| app/src/main/java/com/doctoror/fuckoffmusicplayer/BaseActivity.java | /*
* Copyright (C) 2016 Yaroslav Mytkalyk
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.doctoror.fuckoffmusicplayer;
import com.doctoror.fuckoffmusicplayer.settings.Theme;
import com.f2prateek.dart.InjectExtra;
import android.content.ComponentName;
import android.content.Intent;
import android.media.AudioManager;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.MenuItem;
public abstract class BaseActivity extends AppCompatActivity {
@Nullable
@InjectExtra
Intent parentActivityIntent;
private Theme mTheme;
@Theme.ThemeType
private int mThemeUsed;
@Override
protected void onCreate(@Nullable final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setVolumeControlStream(AudioManager.STREAM_MUSIC);
mTheme = Theme.getInstance(this);
mThemeUsed = mTheme.getThemeType();
}
protected final Theme getTheme1() {
return mTheme;
}
protected final void restart() {
final Intent intent = new Intent(this, getClass());
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
finish();
startActivity(intent);
overridePendingTransition(0, 0);
}
@Override
protected void onStart() {
super.onStart();
// Theme changed while this Activity was in background
if (mThemeUsed != mTheme.getThemeType()) {
restart();
}
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
final Intent parent = getParentActivityIntent();
if (parent != null) {
navigateUpTo(getParentActivityIntent());
} else {
finish();
}
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Nullable
@Override
public Intent getParentActivityIntent() {
final Intent intent = parentActivityIntent != null
? parentActivityIntent : super.getParentActivityIntent();
if (intent != null) {
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
}
return intent;
}
@Override
public boolean navigateUpTo(final Intent upIntent) {
ComponentName destInfo = upIntent.getComponent();
if (destInfo == null) {
destInfo = upIntent.resolveActivity(getPackageManager());
if (destInfo == null) {
return false;
}
}
startActivity(upIntent);
finish();
return true;
}
}
| Added fragment transaction allowance flag
| app/src/main/java/com/doctoror/fuckoffmusicplayer/BaseActivity.java | Added fragment transaction allowance flag | <ide><path>pp/src/main/java/com/doctoror/fuckoffmusicplayer/BaseActivity.java
<ide>
<ide> public abstract class BaseActivity extends AppCompatActivity {
<ide>
<del> @Nullable
<del> @InjectExtra
<del> Intent parentActivityIntent;
<del>
<ide> private Theme mTheme;
<ide>
<ide> @Theme.ThemeType
<ide> private int mThemeUsed;
<add>
<add> private boolean mFragmentTransactionsAllowed;
<ide>
<ide> @Override
<ide> protected void onCreate(@Nullable final Bundle savedInstanceState) {
<ide>
<ide> mTheme = Theme.getInstance(this);
<ide> mThemeUsed = mTheme.getThemeType();
<add>
<add> mFragmentTransactionsAllowed = true;
<ide> }
<ide>
<ide> protected final Theme getTheme1() {
<ide> @Override
<ide> protected void onStart() {
<ide> super.onStart();
<add> mFragmentTransactionsAllowed = true;
<ide> // Theme changed while this Activity was in background
<ide> if (mThemeUsed != mTheme.getThemeType()) {
<ide> restart();
<ide> }
<add> }
<add>
<add> @Override
<add> protected void onResume() {
<add> super.onResume();
<add> mFragmentTransactionsAllowed = true;
<add> }
<add>
<add> @Override
<add> protected void onSaveInstanceState(final Bundle outState) {
<add> super.onSaveInstanceState(outState);
<add> mFragmentTransactionsAllowed = false;
<add> }
<add>
<add> protected final boolean areFragmentTransactionsAllowed() {
<add> return mFragmentTransactionsAllowed;
<ide> }
<ide>
<ide> @Override
<ide> @Nullable
<ide> @Override
<ide> public Intent getParentActivityIntent() {
<del> final Intent intent = parentActivityIntent != null
<del> ? parentActivityIntent : super.getParentActivityIntent();
<add> final Intent intent = super.getParentActivityIntent();
<ide> if (intent != null) {
<ide> intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
<ide> } |
|
JavaScript | bsd-3-clause | fa71dcac5da9adadaf537d1e86a6ce75e73c80ce | 0 | corner82/sanalFabrika,corner82/sanalFabrika,corner82/sanalFabrika,corner82/sanalFabrika | $(document).ready(function () {
/**
* easyui tree extend for 'unselect' event
* @author Mustafa Zeynel Dağlı
* @since 04/04/2016
*/
$.extend($.fn.tree.methods,{
unselect:function(jq,target){
return jq.each(function(){
var opts = $(this).tree('options');
$(target).removeClass('tree-node-selected');
if (opts.onUnselect){
opts.onUnselect.call(this, $(this).tree('getNode',target));
}
});
}
});
/**
* privileges datagrid
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
$('#tt_grid_dynamic_privileges').datagrid({
onDblClickRow : function (index, row) {
},
url : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
queryParams: {
pk: $('#pk').val(),
subject: 'datagrid',
url : 'pkFillPrivilegesOfRolesList_sysAclPrivilege',
sort : 'id',
order : 'desc',
/*machine_groups_id : null,
filterRules:null*/
},
width : '100%',
singleSelect:true,
pagination : true,
collapsible:true,
method:'get',
idField:'id',
//fit:true,
//fitColumns : true,
remoteFilter: true,
remoteSort:true,
multiSort:false,
columns:
[[
{field:'id',title:'ID'},
{field:'privilege_name',title:'Yetki',sortable:true,width:200},
{field:'role_name_tr',title:'Rol',sortable:true,width:200},
{field:'resource_name',title:'Resource',sortable:true,width:100},
{field:'action',title:'Action',width:80,align:'center',
formatter:function(value,row,index){
var u = '<button style="padding : 2px 4px;" title="Servis Atamaları Yap" class="btn btn-info" type="button" onclick="return privilegeServiceAttachDialog('+row.id+', { privilege_name : \''+row.privilege_name+'\',\n\ \n\
role_name_tr : \''+row.role_name_tr+'\',\n\
resource_name : \''+row.resource_name+'\'} );"><i class="fa fa-exchange"></i></button>';
return u;
}
},
]]
});
$('#tt_grid_dynamic_privileges').datagrid('enableFilter');
/*
*
* @type @call;$@call;loadImager
* @Since 28/07/2016
* @Author Mustafa Zeynel Dagli
* @Purpose this variable is to create loader image for roles tree
* this imager goes to #loading-image div in html.
* imager will be removed on resource / roles tree onLoadSuccess method.
*/
var loader = $("#loading-image").loadImager();
loader.loadImager('appendImage');
/**
* multilanguage plugin
* @type Lang
*/
var lang = new Lang();
lang.dynamic($('#ln').val(), '/plugins/jquery-lang-js-master/langpack/'+$('#ln').val()+'.json');
lang.init({
defaultLang: 'en'
});
lang.change($('#ln').val());
var sm = $(window).successMessage();
var dm = $(window).dangerMessage();
var wm = $(window).warningMessage();
var wcm = $(window).warningComplexMessage({ denyButtonLabel : 'Vazgeç' ,
actionButtonLabel : 'İşleme devam et'});
/*
     * ACL resource and role tree
* Mustafa Zeynel Dağlı
* 28/07/2016
*/
$('#tt_tree_menu2').tree({
url: 'https://proxy.sanalfabrika.com/SlimProxyBoot.php?url=pkFillResourceGroups_sysAclResources&pk=' + $("#pk").val()+ '&language_code='+$("#langCode").val(),
method: 'get',
animate: true,
checkbox: false,
cascadeCheck: false,
lines: true,
onLoadSuccess: function (node, data) {
loader.loadImager('removeLoadImage');
},
formatter: function (node) {
var s = node.text;
var id = node.id;
if (node.attributes.root == 'false') {
s += ' <i class="fa fa-level-down" title="Role bağlı yetkileri tabloya doldur" onclick="fillPrivilegeDatagrid('+id+', '+node.resource_id+');"></i>';
return s;
}
return s;
}
});
/**
* privilege datagrid is filled due to Resource/role tree role id
* @param {type} id
* @returns {undefined}
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
window.fillPrivilegeDatagrid = function(id, resource_id) {
var loaderInsertBlock = $("#loading-image-crud").loadImager();
loaderInsertBlock.loadImager('appendImage');
var id = id;
$('#tt_grid_dynamic_privileges').datagrid({
url : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
queryParams: {
pk: $('#pk').val(),
subject: 'datagrid',
url : 'pkFillPrivilegesOfRolesList_sysAclPrivilege',
sort : 'id',
order : 'desc',
role_id : id,
resource_id: resource_id,
},
onLoadSuccess : function(data) {
loaderInsertBlock.loadImager('removeLoadImage');
}
});
$('#tt_grid_dynamic_privileges').datagrid('reload');
$('#tt_grid_dynamic_privileges').datagrid('enableFilter');
}
    // Function called to build the left-hand menu...
$.fn.leftMenuFunction();
/**
* wrapper for ACL privilege and Rest Services attachment process
* @param {type} nodeID
* @param {type} nodeName
* @returns {Boolean}
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
window.privilegeServiceAttachDialog = function (id, row) {
window.gridReloadController = false;
var rrp_id = id;
//console.log(row);
BootstrapDialog.show({
title: '"'+ row.resource_name + '" Resource, "'+ row.role_name_tr + '" Rolü, "'+ row.privilege_name + '" Yetkisinde işlem yapmaktasınız ...',
message: function (dialogRef) {
var dialogRef = dialogRef;
var $message = $(' <div class="row">\n\
<div class="col-md-12">\n\
<div id="loading-image-crud-popup" class="box box-primary">\n\
<form id="aclServiceFormPopup" method="get" class="form-horizontal">\n\
<div class="hr-line-dashed"></div>\n\
<div class="form-group" style="padding-top: 10px;" >\n\
<label class="col-sm-2 control-label">Servisler</label>\n\
<div class="col-sm-10">\n\
<div class="input-group" id="nonAttachedTree">\n\
<div class="input-group-addon" >\n\
<i class="fa fa-hand-o-right"></i>\n\
</div>\n\
<ul id="tt_tree_services_popup" class="easyui-tree" ></ul>\n\
</div>\n\
</div>\n\
</div>\n\
<div class="form-group">\n\
<label class="col-sm-2 control-label">Yetkiye Atanmış Servisler</label>\n\
<div class="col-sm-10">\n\
<div class="input-group" id="attachedTags">\n\
<div class="input-group-addon">\n\
<i class="fa fa-hand-o-right"></i>\n\
</div>\n\
<div style="margin-bottom: -10px;" class="tag-container-popup">\n\
<ul id="test-cabin-popup" class="tag-box"></ul>\n\
</div>\n\
</div>\n\
</div>\n\
</div>\n\
</form>\n\
</div>\n\
</div>\n\
</div>');
return $message;
},
type: BootstrapDialog.TYPE_PRIMARY,
onshown : function () {
window.tagBuilderPopup = $('#test-cabin-popup').tagCabin({
tagCopy : false,
tagDeletable : true,
tagBox : $('.tag-container-popup').find('ul'),
dataMapper : {attributes : Array('rrp_id',
'restservices_id',
'resource_id',
'role_id',
'privilege_id',
'services_group_id')}
});
window.tagBuilderPopup.tagCabin({
onTagRemoved : function(event, data) {
var elementData = data.element;
var id = data.id;
window.deleteServicePrivilegeDialog(id, elementData);
}
});
var nonAttachedTreeLoadImage = $("#nonAttachedTree").loadImager();
nonAttachedTreeLoadImage.loadImager('appendImage');
var attachedTagsLoadImage = $("#attachedTags").loadImager();
attachedTagsLoadImage.loadImager('appendImage');
var ajPopUpMachProp = $('#test-cabin-popup').ajaxCallWidget({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkFillRestServicesOfPrivileges_sysAclRrpRestservices' ,
language_code : $('#langCode').val(),
id : id,
pk : $("#pk").val()
}
})
ajPopUpMachProp.ajaxCallWidget ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage({
onShown : function () {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
}
});
dm.dangerMessage('show', 'Kategoriye Ait Makina Özellikleri Yüklenememiştir...',
                            'Kategoriye ait makina özellikleri yüklenememiştir, sistem yöneticisi ile temasa geçiniz...');
},
onSuccess : function (event, data) {
attachedTagsLoadImage.loadImager('removeLoadImage');
window.tagBuilderPopup.tagCabin('addTags', data);
},
onErrorDataNull : function (event) {
wm.warningMessage('resetOnShown');
wm.warningMessage('show', 'Servis Bulunamamıştır', 'Yetkiye atanmış servis bulunamamıştır!');
attachedTagsLoadImage.loadImager('removeLoadImage');
},
})
ajPopUpMachProp.ajaxCallWidget('call');
$('#tt_tree_services_popup').tree({
url: 'https://proxy.sanalfabrika.com/SlimProxyBoot.php?url=pkFillNotInRestServicesOfPrivilegesTree_sysAclRrpRestservices&pk=' + $("#pk").val()+ '&language_code='+$("#langCode").val()+ '&rrp_id='+id,
method: 'get',
animate: true,
checkbox: false,
cascadeCheck: false,
lines: true,
onLoadSuccess: function (node, data) {
nonAttachedTreeLoadImage.loadImager('removeLoadImage');
},
onSelect: function(node) {
},
formatter: function (node) {
var s = node.text;
var id = node.id;
var services_group_id = node.attributes.services_group_id;
if (node.attributes.root == 'false') {
s += ' <i class="fa fa-level-down" title="Servisi Yetkiye Bağla" onclick="attachServiceToPrivilege('+rrp_id+', '+id+' , '+services_group_id+', \''+node.text+'\');"></i>';
return s;
}
return s;
}
});
},
onhide : function() {
/*if(window.gridReloadController == true) {
$('#tt_grid_dynamic').datagrid('reload');
}*/
},
});
return false;
}
/**
* wrapper class for pop up and delete service from specific
* ACL privilege
* @param {integer} nodeID
* @returns {null}
* @author Mustafa Zeynel Dağlı
* @since 29/07/2016
*/
window.deleteServicePrivilegeDialog= function(id, element){
var id = id;
wcm.warningComplexMessage({onConfirm : function(event, data) {
deleteServicePrivilege(id, element);
}
});
wcm.warningComplexMessage('show', 'Makina Özelliğini Kategoriden Silme İşlemi Gerçekleştirmek Üzeresiniz!',
'Makina özelliğini kategoriden silmek üzeresiniz, makina özelliği silme işlemi geri alınamaz!! ');
}
/**
* delete service from a specific ACL privilege
* @param {type} id
* @param {type} element
* @param {type} machine_group_id
* @returns {undefined}
* @since 29/07/2016
*/
window.deleteServicePrivilege = function(id, element) {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
//var ajPopUpDelete = $(window).ajaxCall({
var ajPopUpDelete = $("#loading-image-crud-popup").ajaxCall({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkDelete_sysAclRrpRestservices' ,
id : id,
pk : $("#pk").val()
}
});
ajPopUpDelete.ajaxCall ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage('resetOnShown');
dm.dangerMessage('show', 'Yetki / Servis Silme İşlemi Başarısız...',
'Yetkiden servis silinememiştir, sistem yöneticisi ile temasa geçiniz...');
console.error('"pkDelete_sysAclRrpRestservices" servis hatası->'+textStatus);
},
onSuccess : function (event, data) {
sm.successMessage({
onShown : function() {
loader.loadImager('removeLoadImage');
parentNode = $('#tt_tree_services_popup').tree('find', element.attr('data-services_group_id'));
$('#tt_tree_services_popup').tree('select', parentNode.target);
$('#tt_tree_services_popup').tree('expand', parentNode.target);
//$('#tt_tree_services_popup').tree('collapseAll');
$('#tt_tree_services_popup').tree('append', {
parent: parentNode.target,
data: [{
attributes:{
active: 0,
description: '',
last_node : 'true',
root : 'false',
service : 'true',
services_group_id : element.attr('data-services_group_id'),
},
id: element.attr('data-restservices_id'),
text: element.text(),
checked: false,
state : 'open',
},]
});
window.tagBuilderPopup.tagCabin('removeTag', element);
}
});
sm.successMessage('show', 'Yetki / Servis Silme İşleminiz Başarılı...',
'Yetki / Servis silme işleminiz başarılı...')
},
onError23503 : function (event, data) {
                wm.warningMessage('resetOnShown');
wm.warningMessage('show', 'Silme İşlemi Gerçekleştiremezsiniz !',
'Servise bağlı bir operasyon tanımlanmıştır, veri bütünlüğünün bozulmaması için\n\
öncelikle servisin bağlı olduğu operasyonun silinmesi gerekmektedir');
loader.loadImager('removeLoadImage');
}
});
ajPopUpDelete.ajaxCall('call');
}
/**
* attach rest service end point and ACL privilege
* @param {type} rrp_id
* @param {type} restservices_id
* @param {type} service_name
* @returns {undefined}
* @author Mustafa Zeynel Dağlı
* @since 29/07/2016
*/
window.attachServiceToPrivilege = function(rrp_id, restservices_id, services_group_id, service_name) {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
var ajServiceAttach = $(window).ajaxCall({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkInsert_sysAclRrpRestservices' ,
language_code : $('#langCode').val(),
rrp_id : rrp_id,
restservices_id : restservices_id,
description : '',
pk : $("#pk").val()
}
})
ajServiceAttach.ajaxCall ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage({
onShown : function () {
}
});
dm.dangerMessage('show', 'Service Yetkiye Atanamamıştır!...',
'Service ilgili yetkiye atanamamıştır,sistem yöneticisi ile temasa geçiniz...');
},
onSuccess : function (event, data) {
var id = data.lastInsertId;
sm.successMessage({
onShown: function( event, data ) {
loader.loadImager('removeLoadImage');
window.tagBuilderPopup.tagCabin('addTagManuallyDataAttr',
id,
service_name,
{services_group_id : services_group_id,
rrp_id : rrp_id,
restservices_id : restservices_id,
description : ''});
selectedTreeItem = $('#tt_tree_services_popup').tree('find', restservices_id);
$('#tt_tree_services_popup').tree('remove', selectedTreeItem.target);
}
});
sm.successMessage('show', 'Servis Yetki Atama İşlemi Başarılı...',
'Servise yetki atama işlemi gerçekleştirdiniz... ',
data);
},
onError23505 : function (event, data) {
dm.dangerMessage({
onShown : function(event, data) {
loader.loadImager('removeLoadImage');
}
});
dm.dangerMessage('show', 'Servis Yetki İşlemi Başarısız...',
'Servis ilgili yetki ile daha önce ilişkilendirilmiştir, yeni bir servis deneyiniz... ');
},
});
ajServiceAttach.ajaxCall('call');
}
});
| public/jsinline/admin_aclprivilegeservices.js | $(document).ready(function () {
/**
* easyui tree extend for 'unselect' event
* @author Mustafa Zeynel Dağlı
* @since 04/04/2016
*/
$.extend($.fn.tree.methods,{
unselect:function(jq,target){
return jq.each(function(){
var opts = $(this).tree('options');
$(target).removeClass('tree-node-selected');
if (opts.onUnselect){
opts.onUnselect.call(this, $(this).tree('getNode',target));
}
});
}
});
/**
* privileges datagrid
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
$('#tt_grid_dynamic_privileges').datagrid({
onDblClickRow : function (index, row) {
},
url : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
queryParams: {
pk: $('#pk').val(),
subject: 'datagrid',
url : 'pkFillPrivilegesOfRolesList_sysAclPrivilege',
sort : 'id',
order : 'desc',
/*machine_groups_id : null,
filterRules:null*/
},
width : '100%',
singleSelect:true,
pagination : true,
collapsible:true,
method:'get',
idField:'id',
//fit:true,
//fitColumns : true,
remoteFilter: true,
remoteSort:true,
multiSort:false,
columns:
[[
{field:'id',title:'ID'},
{field:'privilege_name',title:'Yetki',sortable:true,width:200},
{field:'role_name_tr',title:'Rol',sortable:true,width:200},
{field:'resource_name',title:'Resource',sortable:true,width:100},
{field:'action',title:'Action',width:80,align:'center',
formatter:function(value,row,index){
var u = '<button style="padding : 2px 4px;" title="Servis Atamaları Yap" class="btn btn-info" type="button" onclick="return privilegeServiceAttachDialog('+row.id+', { privilege_name : \''+row.privilege_name+'\',\n\ \n\
role_name_tr : \''+row.role_name_tr+'\',\n\
resource_name : \''+row.resource_name+'\'} );"><i class="fa fa-exchange"></i></button>';
return u;
}
},
]]
});
$('#tt_grid_dynamic_privileges').datagrid('enableFilter');
/*
*
* @type @call;$@call;loadImager
* @Since 28/07/2016
* @Author Mustafa Zeynel Dagli
* @Purpose this variable is to create loader image for roles tree
* this imager goes to #loading-image div in html.
* imager will be removed on resource / roles tree onLoadSuccess method.
*/
var loader = $("#loading-image").loadImager();
loader.loadImager('appendImage');
/**
* multilanguage plugin
* @type Lang
*/
var lang = new Lang();
lang.dynamic($('#ln').val(), '/plugins/jquery-lang-js-master/langpack/'+$('#ln').val()+'.json');
lang.init({
defaultLang: 'en'
});
lang.change($('#ln').val());
var sm = $(window).successMessage();
var dm = $(window).dangerMessage();
var wm = $(window).warningMessage();
var wcm = $(window).warningComplexMessage({ denyButtonLabel : 'Vazgeç' ,
actionButtonLabel : 'İşleme devam et'});
/*
     * ACL resource and role tree
* Mustafa Zeynel Dağlı
* 28/07/2016
*/
$('#tt_tree_menu2').tree({
url: 'https://proxy.sanalfabrika.com/SlimProxyBoot.php?url=pkFillResourceGroups_sysAclResources&pk=' + $("#pk").val()+ '&language_code='+$("#langCode").val(),
method: 'get',
animate: true,
checkbox: false,
cascadeCheck: false,
lines: true,
onLoadSuccess: function (node, data) {
loader.loadImager('removeLoadImage');
},
formatter: function (node) {
var s = node.text;
var id = node.id;
if (node.attributes.root == 'false') {
s += ' <i class="fa fa-level-down" title="Role bağlı yetkileri tabloya doldur" onclick="fillPrivilegeDatagrid('+id+', '+node.resource_id+');"></i>';
return s;
}
return s;
}
});
/**
* privilege datagrid is filled due to Resource/role tree role id
* @param {type} id
* @returns {undefined}
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
window.fillPrivilegeDatagrid = function(id, resource_id) {
var loaderInsertBlock = $("#loading-image-crud").loadImager();
loaderInsertBlock.loadImager('appendImage');
var id = id;
$('#tt_grid_dynamic_privileges').datagrid({
url : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
queryParams: {
pk: $('#pk').val(),
subject: 'datagrid',
url : 'pkFillPrivilegesOfRolesList_sysAclPrivilege',
sort : 'id',
order : 'desc',
role_id : id,
resource_id: resource_id,
},
onLoadSuccess : function(data) {
loaderInsertBlock.loadImager('removeLoadImage');
}
});
$('#tt_grid_dynamic_privileges').datagrid('reload');
$('#tt_grid_dynamic_privileges').datagrid('enableFilter');
}
    // Function called to build the left-hand menu...
$.fn.leftMenuFunction();
/**
* wrapper for ACL privilege and Rest Services attachment process
* @param {type} nodeID
* @param {type} nodeName
* @returns {Boolean}
* @author Mustafa Zeynel Dağlı
* @since 28/07/2016
*/
window.privilegeServiceAttachDialog = function (id, row) {
window.gridReloadController = false;
var rrp_id = id;
//console.log(row);
BootstrapDialog.show({
title: '"'+ row.resource_name + '" Resource, "'+ row.role_name_tr + '" Rolü, "'+ row.privilege_name + '" Yetkisinde işlem yapmaktasınız ...',
message: function (dialogRef) {
var dialogRef = dialogRef;
var $message = $(' <div class="row">\n\
<div class="col-md-12">\n\
<div id="loading-image-crud-popup" class="box box-primary">\n\
<form id="aclServiceFormPopup" method="get" class="form-horizontal">\n\
<div class="hr-line-dashed"></div>\n\
<div class="form-group" style="padding-top: 10px;" >\n\
<label class="col-sm-2 control-label">Servisler</label>\n\
<div class="col-sm-10">\n\
<div class="input-group" id="nonAttachedTree">\n\
<div class="input-group-addon" >\n\
<i class="fa fa-hand-o-right"></i>\n\
</div>\n\
<ul id="tt_tree_services_popup" class="easyui-tree" ></ul>\n\
</div>\n\
</div>\n\
</div>\n\
<div class="form-group">\n\
<label class="col-sm-2 control-label">Yetkiye Atanmış Servisler</label>\n\
<div class="col-sm-10">\n\
<div class="input-group" id="attachedTags">\n\
<div class="input-group-addon">\n\
<i class="fa fa-hand-o-right"></i>\n\
</div>\n\
<div style="margin-bottom: -10px;" class="tag-container-popup">\n\
<ul id="test-cabin-popup" class="tag-box"></ul>\n\
</div>\n\
</div>\n\
</div>\n\
</div>\n\
</form>\n\
</div>\n\
</div>\n\
</div>');
return $message;
},
type: BootstrapDialog.TYPE_PRIMARY,
onshown : function () {
window.tagBuilderPopup = $('#test-cabin-popup').tagCabin({
tagCopy : false,
tagDeletable : true,
tagBox : $('.tag-container-popup').find('ul'),
dataMapper : {attributes : Array('rrp_id',
'restservices_id',
'resource_id',
'role_id',
'privilege_id',
'services_group_id')}
});
window.tagBuilderPopup.tagCabin({
onTagRemoved : function(event, data) {
var elementData = data.element;
var id = data.id;
window.deleteServicePrivilegeDialog(id, elementData);
}
});
var nonAttachedTreeLoadImage = $("#nonAttachedTree").loadImager();
nonAttachedTreeLoadImage.loadImager('appendImage');
var attachedTagsLoadImage = $("#attachedTags").loadImager();
attachedTagsLoadImage.loadImager('appendImage');
var ajPopUpMachProp = $('#test-cabin-popup').ajaxCallWidget({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkFillRestServicesOfPrivileges_sysAclRrpRestservices' ,
language_code : $('#langCode').val(),
id : id,
pk : $("#pk").val()
}
})
ajPopUpMachProp.ajaxCallWidget ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage({
onShown : function () {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
}
});
dm.dangerMessage('show', 'Kategoriye Ait Makina Özellikleri Yüklenememiştir...',
                            'Kategoriye ait makina özellikleri yüklenememiştir, sistem yöneticisi ile temasa geçiniz...');
},
onSuccess : function (event, data) {
attachedTagsLoadImage.loadImager('removeLoadImage');
window.tagBuilderPopup.tagCabin('addTags', data);
},
onErrorDataNull : function (event) {
wm.warningMessage('resetOnShown');
wm.warningMessage('show', 'Servis Bulunamamıştır', 'Yetkiye atanmış servis bulunamamıştır!');
attachedTagsLoadImage.loadImager('removeLoadImage');
},
})
ajPopUpMachProp.ajaxCallWidget('call');
$('#tt_tree_services_popup').tree({
url: 'https://proxy.sanalfabrika.com/SlimProxyBoot.php?url=pkFillNotInRestServicesOfPrivilegesTree_sysAclRrpRestservices&pk=' + $("#pk").val()+ '&language_code='+$("#langCode").val()+ '&rrp_id='+id,
method: 'get',
animate: true,
checkbox: false,
cascadeCheck: false,
lines: true,
onLoadSuccess: function (node, data) {
nonAttachedTreeLoadImage.loadImager('removeLoadImage');
},
onSelect: function(node) {
},
formatter: function (node) {
var s = node.text;
var id = node.id;
var services_group_id = node.attributes.services_group_id;
if (node.attributes.root == 'false') {
s += ' <i class="fa fa-level-down" title="Servisi Yetkiye Bağla" onclick="attachServiceToPrivilege('+rrp_id+', '+id+' , '+services_group_id+', \''+node.text+'\');"></i>';
return s;
}
return s;
}
});
},
onhide : function() {
/*if(window.gridReloadController == true) {
$('#tt_grid_dynamic').datagrid('reload');
}*/
},
});
return false;
}
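    /*
     * Minimal usage sketch (illustrative only, not part of the original page script):
     * privilegeServiceAttachDialog(id, row) is meant to be fired from a row of the
     * privileges datagrid. The event hook and the row fields used here are assumptions.
     *
     *   $('#tt_grid_dynamic_privileges').datagrid({
     *       onDblClickRow : function (index, row) {
     *           window.privilegeServiceAttachDialog(row.id, row);
     *       }
     *   });
     */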
/**
* wrapper class for pop up and delete service from specific
* ACL privilege
 * @param {integer} id
 * @param {object} element
* @returns {null}
* @author Mustafa Zeynel Dağlı
* @since 29/07/2016
*/
window.deleteServicePrivilegeDialog= function(id, element){
var id = id;
wcm.warningComplexMessage({onConfirm : function(event, data) {
deleteServicePrivilege(id, element);
}
});
wcm.warningComplexMessage('show', 'Makina Özelliğini Kategoriden Silme İşlemi Gerçekleştirmek Üzeresiniz!',
'Makina özelliğini kategoriden silmek üzeresiniz, makina özelliği silme işlemi geri alınamaz!! ');
}
/**
* delete service from a specific ACL privilege
 * @param {type} id
 * @param {type} element
* @returns {undefined}
* @since 29/07/2016
*/
window.deleteServicePrivilege = function(id, element) {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
//var ajPopUpDelete = $(window).ajaxCall({
var ajPopUpDelete = $("#loading-image-crud-popup").ajaxCall({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkDelete_sysAclRrpRestservices' ,
id : id,
pk : $("#pk").val()
}
});
ajPopUpDelete.ajaxCall ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage('resetOnShown');
dm.dangerMessage('show', 'Yetki / Servis Silme İşlemi Başarısız...',
'Yetkiden servis silinememiştir, sistem yöneticisi ile temasa geçiniz...');
console.error('"pkDelete_sysAclRrpRestservices" servis hatası->'+textStatus);
},
onSuccess : function (event, data) {
sm.successMessage({
onShown : function() {
loader.loadImager('removeLoadImage');
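                            // On success, put the removed service back into the "available
                            // services" tree: locate its parent group node, select/expand it,
                            // re-append the service as a leaf, then drop its tag from the
                            // attached-services box below.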
                            var parentNode = $('#tt_tree_services_popup').tree('find', element.attr('data-services_group_id'));
$('#tt_tree_services_popup').tree('select', parentNode.target);
$('#tt_tree_services_popup').tree('expand', parentNode.target);
//$('#tt_tree_services_popup').tree('collapseAll');
$('#tt_tree_services_popup').tree('append', {
parent: parentNode.target,
data: [{
attributes:{
active: 0,
description: '',
last_node : 'true',
root : 'false',
service : 'true',
services_group_id : element.attr('data-services_group_id'),
},
id: element.attr('data-restservices_id'),
text: element.text(),
checked: false,
state : 'open',
},]
});
window.tagBuilderPopup.tagCabin('removeTag', element);
}
});
sm.successMessage('show', 'Yetki / Servis Silme İşleminiz Başarılı...',
'Yetki / Servis silme işleminiz başarılı...')
},
});
ajPopUpDelete.ajaxCall('call');
}
/**
* attach rest service end point and ACL privilege
 * @param {type} rrp_id
 * @param {type} restservices_id
 * @param {type} services_group_id
 * @param {type} service_name
* @returns {undefined}
* @author Mustafa Zeynel Dağlı
* @since 29/07/2016
*/
window.attachServiceToPrivilege = function(rrp_id, restservices_id, services_group_id, service_name) {
var loader = $("#loading-image-crud-popup").loadImager();
loader.loadImager('appendImage');
var ajServiceAttach = $(window).ajaxCall({
proxy : 'https://proxy.sanalfabrika.com/SlimProxyBoot.php',
data : {
url:'pkInsert_sysAclRrpRestservices' ,
language_code : $('#langCode').val(),
rrp_id : rrp_id,
restservices_id : restservices_id,
description : '',
pk : $("#pk").val()
}
})
ajServiceAttach.ajaxCall ({
onError : function (event, textStatus, errorThrown) {
dm.dangerMessage({
onShown : function () {
}
});
                dm.dangerMessage('show', 'Servis Yetkiye Atanamamıştır!...',
                        'Servis ilgili yetkiye atanamamıştır, sistem yöneticisi ile temasa geçiniz...');
},
onSuccess : function (event, data) {
var id = data.lastInsertId;
sm.successMessage({
onShown: function( event, data ) {
loader.loadImager('removeLoadImage');
window.tagBuilderPopup.tagCabin('addTagManuallyDataAttr',
id,
service_name,
{services_group_id : services_group_id,
rrp_id : rrp_id,
restservices_id : restservices_id,
description : ''});
                            var selectedTreeItem = $('#tt_tree_services_popup').tree('find', restservices_id);
$('#tt_tree_services_popup').tree('remove', selectedTreeItem.target);
}
});
sm.successMessage('show', 'Servis Yetki Atama İşlemi Başarılı...',
'Servise yetki atama işlemi gerçekleştirdiniz... ',
data);
},
onError23505 : function (event, data) {
dm.dangerMessage({
onShown : function(event, data) {
loader.loadImager('removeLoadImage');
}
});
dm.dangerMessage('show', 'Servis Yetki İşlemi Başarısız...',
'Servis ilgili yetki ile daha önce ilişkilendirilmiştir, yeni bir servis deneyiniz... ');
},
});
ajServiceAttach.ajaxCall('call');
}
});
| added 23503 error message code for service delete operations
added 23503 error message code for service delete operations
| public/jsinline/admin_aclprivilegeservices.js | servis silme işlemlerinde 23503 hata mesajı kodu eklendi | <ide><path>ublic/jsinline/admin_aclprivilegeservices.js
<ide> * @since 04/04/2016
<ide> */
<ide> $.extend($.fn.tree.methods,{
<del> unselect:function(jq,target){
<del> return jq.each(function(){
<del> var opts = $(this).tree('options');
<del> $(target).removeClass('tree-node-selected');
<del> if (opts.onUnselect){
<del> opts.onUnselect.call(this, $(this).tree('getNode',target));
<del> }
<del> });
<del> }
<add> unselect:function(jq,target){
<add> return jq.each(function(){
<add> var opts = $(this).tree('options');
<add> $(target).removeClass('tree-node-selected');
<add> if (opts.onUnselect){
<add> opts.onUnselect.call(this, $(this).tree('getNode',target));
<add> }
<add> });
<add> }
<ide> });
<ide>
<ide> /**
<ide> });
<ide> sm.successMessage('show', 'Yetki / Servis Silme İşleminiz Başarılı...',
<ide> 'Yetki / Servis silme işleminiz başarılı...')
<del> },
<add> },
<add> onError23503 : function (event, data) {
<add> 	    wm.warningMessage('resetOnShown');
<add> wm.warningMessage('show', 'Silme İşlemi Gerçekleştiremezsiniz !',
<add> 'Servise bağlı bir operasyon tanımlanmıştır, veri bütünlüğünün bozulmaması için\n\
<add> öncelikle servisin bağlı olduğu operasyonun silinmesi gerekmektedir');
<add> loader.loadImager('removeLoadImage');
<add> }
<ide> });
<ide> ajPopUpDelete.ajaxCall('call');
<ide> } |
|
Java | bsd-3-clause | 4bd03ccd104f97dd95389d3b57ed02dd230b8b12 | 0 | jacksonicson/rain | /*
* Copyright (c) 2010, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of California, Berkeley
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package radlab.rain.scoreboard;
import java.util.LinkedList;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import radlab.rain.RainConfig;
import radlab.rain.operation.OperationExecution;
import de.tum.in.dss.psquare.PSquared;
public class OperationSummary {
private static Logger logger = LoggerFactory.getLogger(OperationSummary.class);
// Information recorded about one operation type
private long opsSuccessful = 0;
private long opsFailed = 0;
private long actionsSuccessful = 0;
private long opsAsync = 0;
private long opsSync = 0;
private long minResponseTime = Long.MAX_VALUE;
private long maxResponseTime = Long.MIN_VALUE;
private long totalResponseTime = 0;
private long opsFailedRtimeThreshold = 0;
// Sample the response times so that we can give a "reasonable"
// estimate of the 90th and 99th percentiles.
private IMetricSampler responseTimeSampler;
// Percentile estimation based on the P-square algorithm
private PSquared rtime99th = new PSquared(0.99f);
private PSquared rtime95th = new PSquared(0.95f);
private PSquared rtime90th = new PSquared(0.90f);
private PSquared rtime50th = new PSquared(0.50f);
public OperationSummary(IMetricSampler strategy) {
responseTimeSampler = strategy;
}
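
    /*
     * Illustrative lifecycle sketch (not part of the original source): a scoreboard
     * would typically keep one OperationSummary per operation type, feed it every
     * completed OperationExecution and ask for aggregated statistics at the end of
     * a run. The sampler instance and the run duration value are assumptions.
     *
     *   OperationSummary summary = new OperationSummary(someMetricSampler);
     *   for (OperationExecution result : completedOperations)
     *       summary.processResult(result);
     *   JSONObject stats = summary.getStatistics(runDurationInMilliseconds);
     */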
void resetSamples() {
responseTimeSampler.reset();
}
void processResult(OperationExecution result) {
if (result.failed) {
opsFailed++;
} else { // Result successful
opsSuccessful++;
actionsSuccessful += result.actionsPerformed;
// Count operations
if (result.async) {
opsAsync++;
} else {
opsSync++;
}
// Update response time sample
long responseTime = result.getExecutionTime();
responseTimeSampler.accept(responseTime);
totalResponseTime += responseTime;
if (responseTime > RainConfig.rtime_T)
opsFailedRtimeThreshold++;
// Update response time percentile estimations
rtime99th.accept(responseTime);
rtime95th.accept(responseTime);
rtime90th.accept(responseTime);
rtime50th.accept(responseTime);
// Update max and min response time
maxResponseTime = Math.max(maxResponseTime, responseTime);
minResponseTime = Math.min(minResponseTime, responseTime);
}
}
JSONObject getStatistics(double runDuration) throws JSONException {
// Total operations executed
long totalOperations = opsSuccessful + opsFailed;
double effectiveLoadOperations = 0;
double effectiveLoadRequests = 0;
double averageRTime = 0;
// Calculations (per second)
if (runDuration > 0) {
effectiveLoadOperations = (double) opsSuccessful / toSeconds(runDuration);
effectiveLoadRequests = (double) actionsSuccessful / toSeconds(runDuration);
} else {
logger.warn("run duration <= 0");
}
if (opsSuccessful > 0) {
averageRTime = (double) totalResponseTime / (double) opsSuccessful;
} else {
logger.warn("total ops successfull <= 0");
}
// Results
JSONObject operation = new JSONObject();
operation.put("ops_successful", opsSuccessful);
operation.put("ops_failed", opsFailed);
operation.put("ops_seen", totalOperations);
operation.put("actions_successful", actionsSuccessful);
operation.put("ops_async", opsAsync);
operation.put("ops_sync", opsSync);
operation.put("effective_load_ops", effectiveLoadOperations);
operation.put("effective_load_req", effectiveLoadRequests);
operation.put("rtime_total", totalResponseTime);
operation.put("rtime_average", nNaN(averageRTime));
operation.put("rtime_max", maxResponseTime);
operation.put("rtime_min", minResponseTime);
operation.put("rtime_50th", nNaN(rtime50th.getPValue()));
operation.put("rtime_90th", nNaN(rtime90th.getPValue()));
operation.put("rtime_95th", nNaN(rtime95th.getPValue()));
operation.put("rtime_99th", nNaN(rtime99th.getPValue()));
operation.put("rtime_thr_failed", opsFailedRtimeThreshold);
operation.put("sampler_samples_collected", responseTimeSampler.getSamplesCollected());
operation.put("sampler_samples_seen", responseTimeSampler.getSamplesSeen());
operation.put("sampler_rtime_50th", nNaN(responseTimeSampler.getNthPercentile(50)));
operation.put("sampler_rtime_90th", nNaN(responseTimeSampler.getNthPercentile(90)));
operation.put("sampler_rtime_95th", nNaN(responseTimeSampler.getNthPercentile(95)));
operation.put("sampler_rtime_99th", nNaN(responseTimeSampler.getNthPercentile(99)));
operation.put("sampler_rtime_mean", nNaN(responseTimeSampler.getSampleMean()));
operation.put("sampler_rtime_stdev", nNaN(responseTimeSampler.getSampleStandardDeviation()));
operation.put("sampler_rtime_tvalue", nNaN(responseTimeSampler.getTvalue(averageRTime)));
return operation;
}
private double nNaN(double val) {
if (Double.isNaN(val))
return 0;
else if (Double.isInfinite(val))
return 0;
return val;
}
private final double toSeconds(double timestamp) {
return timestamp / 1000d;
}
private IMetricSampler getResponseTimeSampler() {
return responseTimeSampler;
}
public void merge(OperationSummary from) {
opsSuccessful += from.opsSuccessful;
opsFailed += from.opsFailed;
actionsSuccessful += from.actionsSuccessful;
opsAsync += from.opsAsync;
opsSync += from.opsSync;
minResponseTime = Math.min(minResponseTime, from.minResponseTime);
maxResponseTime = Math.max(maxResponseTime, from.maxResponseTime);
totalResponseTime += from.totalResponseTime;
opsFailedRtimeThreshold += from.opsFailedRtimeThreshold;
// TODO: How to combine two separate percentiles?
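		// Accepting the other estimator's current percentile estimate below is only a
		// rough approximation; two P-squared estimators cannot be merged exactly from
		// their marker state. The raw samples re-accepted further down are what let the
		// sampler-based percentiles reflect the combined observations.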
rtime99th.accept(from.rtime99th.getPValue());
rtime95th.accept(from.rtime95th.getPValue());
rtime90th.accept(from.rtime90th.getPValue());
rtime50th.accept(from.rtime50th.getPValue());
// Accept all response time samples
LinkedList<Long> rhsRawSamples = from.getResponseTimeSampler().getRawSamples();
for (Long obs : rhsRawSamples)
responseTimeSampler.accept(obs);
}
public long getOpsSuccessful() {
return opsSuccessful;
}
public long getOpsFailed() {
return opsFailed;
}
public long getTotalResponseTime() {
return totalResponseTime;
}
}
| src/radlab/rain/scoreboard/OperationSummary.java | /*
* Copyright (c) 2010, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of California, Berkeley
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package radlab.rain.scoreboard;
import java.util.LinkedList;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import radlab.rain.RainConfig;
import radlab.rain.operation.OperationExecution;
import de.tum.in.dss.psquare.PSquared;
public class OperationSummary {
private static Logger logger = LoggerFactory.getLogger(OperationSummary.class);
// Information recorded about one operation type
private long opsSuccessful = 0;
private long opsFailed = 0;
private long actionsSuccessful = 0;
private long opsAsync = 0;
private long opsSync = 0;
private long minResponseTime = Long.MAX_VALUE;
private long maxResponseTime = Long.MIN_VALUE;
private long totalResponseTime = 0;
private long opsFailedRtimeThreshold = 0;
// Sample the response times so that we can give a "reasonable"
// estimate of the 90th and 99th percentiles.
private IMetricSampler responseTimeSampler;
// Percentile estimation based on the P-square algorithm
private PSquared rtime99th = new PSquared(0.99f);
private PSquared rtime95th = new PSquared(0.95f);
private PSquared rtime90th = new PSquared(0.90f);
private PSquared rtime50th = new PSquared(0.50f);
public OperationSummary(IMetricSampler strategy) {
responseTimeSampler = strategy;
}
void resetSamples() {
responseTimeSampler.reset();
}
void processResult(OperationExecution result) {
if (result.failed) {
opsFailed++;
} else { // Result successful
opsSuccessful++;
actionsSuccessful += result.actionsPerformed;
// Count operations
if (result.async) {
opsAsync++;
} else {
opsSync++;
}
// Update response time sample
long responseTime = result.getExecutionTime();
responseTimeSampler.accept(responseTime);
totalResponseTime += responseTime;
if (responseTime > RainConfig.rtime_T)
opsFailedRtimeThreshold++;
// Update response time percentile estimations
rtime99th.accept(responseTime);
rtime95th.accept(responseTime);
rtime90th.accept(responseTime);
rtime50th.accept(responseTime);
// Update max and min response time
maxResponseTime = Math.max(maxResponseTime, responseTime);
minResponseTime = Math.min(minResponseTime, responseTime);
}
}
JSONObject getStatistics(double runDuration) throws JSONException {
// Total operations executed
long totalOperations = opsSuccessful + opsFailed;
double effectiveLoadOperations = 0;
double effectiveLoadRequests = 0;
double averageRTime = 0;
// Calculations (per second)
if (runDuration > 0) {
effectiveLoadOperations = (double) opsSuccessful / toSeconds(runDuration);
effectiveLoadRequests = (double) actionsSuccessful / toSeconds(runDuration);
} else {
logger.warn("run duration <= 0");
}
if (opsSuccessful > 0) {
averageRTime = (double) totalResponseTime / (double) opsSuccessful;
} else {
logger.warn("total ops successfull <= 0");
}
// Results
JSONObject operation = new JSONObject();
operation.put("ops_successful", opsSuccessful);
operation.put("ops_failed", opsFailed);
operation.put("ops_seen", totalOperations);
operation.put("actions_successful", actionsSuccessful);
operation.put("ops_async", opsAsync);
operation.put("ops_sync", opsSync);
operation.put("effective_load_ops", effectiveLoadOperations);
operation.put("effective_load_req", effectiveLoadRequests);
operation.put("rtime_total", totalResponseTime);
operation.put("rtime_average", nNaN(averageRTime));
operation.put("rtime_max", maxResponseTime);
operation.put("rtime_min", minResponseTime);
operation.put("rtime_50th", nNaN(rtime50th.getPValue()));
operation.put("rtime_90th", nNaN(rtime90th.getPValue()));
operation.put("rtime_95th", nNaN(rtime95th.getPValue()));
operation.put("rtime_99th", nNaN(rtime99th.getPValue()));
operation.put("rtime_thr_failed", opsFailedRtimeThreshold);
operation.put("sampler_samples_collected", responseTimeSampler.getSamplesCollected());
operation.put("sampler_samples_seen", responseTimeSampler.getSamplesSeen());
operation.put("sampler_rtime_50th", nNaN(responseTimeSampler.getNthPercentile(50)));
operation.put("sampler_rtime_90th", nNaN(responseTimeSampler.getNthPercentile(90)));
operation.put("sampler_rtime_95th", nNaN(responseTimeSampler.getNthPercentile(95)));
operation.put("sampler_rtime_99th", nNaN(responseTimeSampler.getNthPercentile(99)));
operation.put("sampler_rtime_mean", nNaN(responseTimeSampler.getSampleMean()));
operation.put("sampler_rtime_stdev", nNaN(responseTimeSampler.getSampleStandardDeviation()));
operation.put("sampelr_rtime_tvalue", nNaN(responseTimeSampler.getTvalue(averageRTime)));
return operation;
}
private double nNaN(double val) {
if (Double.isNaN(val))
return 0;
else if (Double.isInfinite(val))
return 0;
return val;
}
private final double toSeconds(double timestamp) {
return timestamp / 1000d;
}
private IMetricSampler getResponseTimeSampler() {
return responseTimeSampler;
}
public void merge(OperationSummary from) {
opsSuccessful += from.opsSuccessful;
opsFailed += from.opsFailed;
actionsSuccessful += from.actionsSuccessful;
opsAsync += from.opsAsync;
opsSync += from.opsSync;
minResponseTime = Math.min(minResponseTime, from.minResponseTime);
maxResponseTime = Math.max(maxResponseTime, from.maxResponseTime);
totalResponseTime += from.totalResponseTime;
opsFailedRtimeThreshold += from.opsFailedRtimeThreshold;
// TODO: How to combine two separate percentiles?
rtime99th.accept(from.rtime99th.getPValue());
rtime95th.accept(from.rtime95th.getPValue());
rtime90th.accept(from.rtime90th.getPValue());
rtime50th.accept(from.rtime50th.getPValue());
// Accept all response time samples
LinkedList<Long> rhsRawSamples = from.getResponseTimeSampler().getRawSamples();
for (Long obs : rhsRawSamples)
responseTimeSampler.accept(obs);
}
public long getOpsSuccessful() {
return opsSuccessful;
}
public long getOpsFailed() {
return opsFailed;
}
public long getTotalResponseTime() {
return totalResponseTime;
}
}
| fixed typo in JSON name
| src/radlab/rain/scoreboard/OperationSummary.java | fixed typo in JSON name | <ide><path>rc/radlab/rain/scoreboard/OperationSummary.java
<ide> operation.put("sampler_rtime_99th", nNaN(responseTimeSampler.getNthPercentile(99)));
<ide> operation.put("sampler_rtime_mean", nNaN(responseTimeSampler.getSampleMean()));
<ide> operation.put("sampler_rtime_stdev", nNaN(responseTimeSampler.getSampleStandardDeviation()));
<del> operation.put("sampelr_rtime_tvalue", nNaN(responseTimeSampler.getTvalue(averageRTime)));
<add> operation.put("sampler_rtime_tvalue", nNaN(responseTimeSampler.getTvalue(averageRTime)));
<ide>
<ide> return operation;
<ide> } |
|
Java | lgpl-2.1 | 35e032c9a8b2430b892d1061e150c3f032f51085 | 0 | levants/lightmare | package org.lightmare.jndi;
import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.naming.spi.InitialContextFactory;
import org.lightmare.utils.ObjectUtils;
/**
* Utility class to initialize and set (
* {@link System#setProperty(String, String)}) the {@link InitialContextFactory}
* for simple jndi extensions
*
* @author levan
*
*/
public class JndiManager {
// Value of InitialContextFactory implementation class
private static final Class<LightmareInitialContextFactory> FACTORY_CLASS = LightmareInitialContextFactory.class;
// Name of InitialContextFactory implementation class package
private static final String PACKAGE_PREFIXES = FACTORY_CLASS.getPackage()
.getName();
// Name of InitialContextFactory implementation class
private static final String FACTORY_CLASS_NAME = FACTORY_CLASS.getName();
private static boolean isContextFactory;
private static Context context;
private static final Lock LOCK = new ReentrantLock();
/**
* Creates and sets {@link InitialContext}
*
* @throws IOException
*/
private void setInitialCotext() throws IOException {
if (ObjectUtils.notTrue(isContextFactory)) {
System.getProperties().put(Context.INITIAL_CONTEXT_FACTORY,
FACTORY_CLASS_NAME);
System.getProperties().put(Context.URL_PKG_PREFIXES,
PACKAGE_PREFIXES);
isContextFactory = Boolean.TRUE;
}
if (context == null) {
try {
Properties properties = new Properties();
properties.put(Context.INITIAL_CONTEXT_FACTORY,
FACTORY_CLASS_NAME);
properties.put(Context.URL_PKG_PREFIXES, PACKAGE_PREFIXES);
context = new InitialContext(properties);
} catch (NamingException ex) {
throw new IOException(ex);
}
}
}
/**
     * Getter for {@link Context}; checks whether it is initialized and, if not,
     * calls the {@link JndiManager#setInitialCotext()} method
*
* @return {@link Context}
* @throws IOException
*/
public Context getContext() throws IOException {
if (context == null) {
LOCK.lock();
try {
setInitialCotext();
} finally {
LOCK.unlock();
}
}
return context;
}
/**
* Lookups data with passed name in {@link Context} and cast it in generic
* type
*
* @param name
* @return <code>T</code>
* @throws IOException
*/
public <T> T lookup(String name) throws IOException {
try {
@SuppressWarnings("unchecked")
T value = (T) getContext().lookup(name);
return value;
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
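
    /*
     * Minimal usage sketch (illustrative only; the JNDI name and the bound value are
     * assumptions, not taken from the original source):
     *
     *   JndiManager manager = new JndiManager();
     *   manager.bind("java:comp/env/example", "some value");
     *   String value = manager.lookup("java:comp/env/example");
     *   manager.unbind("java:comp/env/example");
     */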
/**
* Rebinds passed {@link Object} to {@link Context} by appropriate name
*
* @param name
* @param data
* @throws IOException
*/
public void rebind(String name, Object data) throws IOException {
try {
getContext().rebind(name, data);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
/**
* Binds passed {@link Object} to {@link Context} by appropriate name
*
* @param name
* @param data
* @throws IOException
*/
public void bind(String name, Object data) throws IOException {
try {
getContext().bind(name, data);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
/**
* Unbinds passed name from {@link Context}
*
* @param name
* @throws IOException
*/
public void unbind(String name) throws IOException {
try {
getContext().unbind(name);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
}
| src/main/java/org/lightmare/jndi/JndiManager.java | package org.lightmare.jndi;
import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.naming.spi.InitialContextFactory;
import org.lightmare.utils.ObjectUtils;
/**
* Utility class to initialize and set (
* {@link System#setProperty(String, String)}) the {@link InitialContextFactory}
* for simple jndi extensions
*
* @author levan
*
*/
public class JndiManager {
// Value of InitialContextFactory implementation class
private static final Class<LightmareInitialContextFactory> FACTORY_CLASS = LightmareInitialContextFactory.class;
// Name of InitialContextFactory implementation class package
private static final String PACKAGE_PREFIXES = FACTORY_CLASS.getPackage()
.getName();
// Name of InitialContextFactory implementation class
private static final String FACTORY_CLASS_NAME = FACTORY_CLASS.getName();
private static boolean isContextFactory;
private static Context context;
private static final Lock LOCK = new ReentrantLock();
/**
* Creates and sets {@link InitialContext}
*
* @throws IOException
*/
private void setInitialCotext() throws IOException {
if (ObjectUtils.notTrue(isContextFactory)) {
System.getProperties().put(Context.INITIAL_CONTEXT_FACTORY,
FACTORY_CLASS_NAME);
System.getProperties().put(Context.URL_PKG_PREFIXES,
PACKAGE_PREFIXES);
isContextFactory = Boolean.TRUE;
}
if (context == null) {
try {
Properties properties = new Properties();
properties.put(Context.INITIAL_CONTEXT_FACTORY,
FACTORY_CLASS_NAME);
properties.put(Context.URL_PKG_PREFIXES, PACKAGE_PREFIXES);
context = new InitialContext(properties);
} catch (NamingException ex) {
throw new IOException(ex);
}
}
}
/**
     * Getter for {@link Context}; checks whether it is initialized and, if not,
     * calls the {@link JndiManager#setInitialCotext()} method
*
* @return {@link Context}
* @throws IOException
*/
public Context getContext() throws IOException {
if (context == null) {
LOCK.lock();
try {
setInitialCotext();
} finally {
LOCK.unlock();
}
}
return context;
}
public <T> T lookup(String name) throws IOException {
try {
@SuppressWarnings("unchecked")
T value = (T) getContext().lookup(name);
return value;
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
/**
* Rebinds passed {@link Object} to {@link Context} by appropriate name
*
* @param name
* @param data
* @throws IOException
*/
public void rebind(String name, Object data) throws IOException {
try {
getContext().rebind(name, data);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
/**
* Binds passed {@link Object} to {@link Context} by appropriate name
*
* @param name
* @param data
* @throws IOException
*/
public void bind(String name, Object data) throws IOException {
try {
getContext().bind(name, data);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
/**
* Unbinds passed name from {@link Context}
*
* @param name
* @throws IOException
*/
public void unbind(String name) throws IOException {
try {
getContext().unbind(name);
} catch (NamingException ex) {
throw new IOException(ex);
} catch (IOException ex) {
throw new IOException(ex);
}
}
}
| added comments to NamingUtils.lookup method | src/main/java/org/lightmare/jndi/JndiManager.java | added comments to NamingUtils.lookup method | <ide><path>rc/main/java/org/lightmare/jndi/JndiManager.java
<ide> return context;
<ide> }
<ide>
<add> /**
<add> * Lookups data with passed name in {@link Context} and cast it in generic
<add> * type
<add> *
<add> * @param name
<add> * @return <code>T</code>
<add> * @throws IOException
<add> */
<ide> public <T> T lookup(String name) throws IOException {
<ide>
<ide> try { |
|
Java | bsd-3-clause | 5d1fb620f11e7578ceb42671b451d5340487b6b8 | 0 | gengo/gengo-java | package com.gengo.client;
import java.awt.image.BufferedImage;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.gengo.client.enums.HttpMethod;
import com.gengo.client.enums.Rating;
import com.gengo.client.enums.RejectReason;
import com.gengo.client.exceptions.GengoException;
import com.gengo.client.payloads.Approval;
import com.gengo.client.payloads.FileJob;
import com.gengo.client.payloads.JobUpdate;
import com.gengo.client.payloads.Payload;
import com.gengo.client.payloads.Rejection;
import com.gengo.client.payloads.Revision;
import com.gengo.client.payloads.TranslationJob;
import com.gengo.client.payloads.Payloads;
/**
* A Java client for the Gengo.com translation API.
* This client depends on the JSON in Java library available at:
* http://json.org/java/
*/
public class GengoClient extends JsonHttpApi
{
private static final String STANDARD_BASE_URL = "http://api.gengo.com/v2/";
private static final String SANDBOX_BASE_URL = "http://api.sandbox.gengo.com/v2/";
/** Strings used to represent TRUE and FALSE in requests */
public static final String MYGENGO_TRUE = "1";
public static final String MYGENGO_FALSE = "0";
private String baseUrl = STANDARD_BASE_URL;
/**
* Initialize the client.
* @param publicKey your Gengo.com public API key
* @param privateKey your Gengo.com private API key
*/
public GengoClient(String publicKey, String privateKey)
{
this(publicKey, privateKey, false);
}
/**
* Initialize the client with the option to use the sandbox.
* @param publicKey your Gengo.com public API key
* @param privateKey your Gengo.com private API key
* @param useSandbox true to use the sandbox, false to use the live service
*/
public GengoClient(String publicKey, String privateKey, boolean useSandbox)
{
super(publicKey, privateKey);
setUseSandbox(useSandbox);
}
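
    /*
     * Minimal usage sketch (illustrative only; the API keys are placeholders and the
     * TranslationJob construction is omitted because its constructor signature is not
     * shown here):
     *
     *   GengoClient client = new GengoClient("public-key", "private-key", true); // sandbox
     *   JSONObject balance = client.getAccountBalance();
     *   // TranslationJob job = ...;                       // build a job payload
     *   // JSONObject posted = client.postTranslationJob(job);
     */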
/**
* @return true iff the client is using the sandbox
*/
public boolean getUseSandbox()
{
return SANDBOX_BASE_URL.equals(baseUrl);
}
/**
* Set the client to use the sandbox or the live service.
* @param use true iff the client should use the sandbox
*/
public void setUseSandbox(boolean use)
{
baseUrl = use ? SANDBOX_BASE_URL : STANDARD_BASE_URL;
}
/**
* Set a custom base URL. For development testing purposes only.
* @param baseUrl a custom API base URL
*/
public void setBaseUrl(String baseUrl)
{
this.baseUrl = baseUrl;
}
/**
* Get account statistics.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getAccountStats() throws GengoException
{
String url = baseUrl + "account/stats";
return call(url, HttpMethod.GET);
}
/**
* Get account balance.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getAccountBalance() throws GengoException
{
String url = baseUrl + "account/balance";
return call(url, HttpMethod.GET);
}
/**
* Submit a job for translation.
* @param job a job payload object
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJob(TranslationJob job)
throws GengoException
{
try
{
String url = baseUrl + "translate/job";
JSONObject data = new JSONObject();
data.put("job", job.toJSONObject());
return call(url, HttpMethod.POST, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Submit multiple jobs for translation.
* @param jobs TranslationJob payload objects
* @param processAsGroup true iff the jobs should be processed as a group
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobs(List<TranslationJob> jobs, boolean processAsGroup)
throws GengoException
{
return postTranslationJobs(jobs, processAsGroup, false);
}
/**
* Submit multiple jobs for translation.
* @param jobs TranslationJob payload objects
* @param processAsGroup true iff the jobs should be processed as a group
* @param fixBadResponse true iff malformed job responses should be fixed
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobs(List<TranslationJob> jobs, boolean processAsGroup, boolean fixBadResponse)
throws GengoException
{
try
{
String url = baseUrl + "translate/jobs";
JSONObject data = new JSONObject();
/* We can safely cast our list of jobs into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<Payload> p = (List)jobs;
data.put("jobs", (new Payloads(p)).toJSONArray());
data.put("as_group", processAsGroup ? MYGENGO_TRUE : MYGENGO_FALSE);
JSONObject rsp = call(url, HttpMethod.POST, data);
return fixBadResponse ? fixBadPostJobsResponse(rsp) : rsp;
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/* Temporary workaround for a server side issue with this API method */
private JSONObject fixBadPostJobsResponse(JSONObject rsp)
{
try
{
if ("ok".equals(rsp.getString("opstat")))
{
JSONObject innerRsp = rsp.getJSONObject("response");
JSONArray jobs = innerRsp.getJSONArray("jobs");
if (jobs.length() > 0)
{
JSONObject first = jobs.getJSONArray(0).getJSONObject(0);
JSONObject newFirst = new JSONObject();
newFirst.put("0", first);
jobs.put(0, newFirst);
innerRsp.put("jobs", jobs);
rsp.put("response", innerRsp);
}
}
}
catch (JSONException e)
{
// not a response we can or need to fix
}
return rsp;
}
/**
* Request revisions for a job.
* @param id The job ID
* @param comments Comments for the translator
* @return the response from the server
* @throws GengoException
*/
public JSONObject reviseTranslationJob(int id, String comments)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "revise");
data.put("comment", comments);
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Approve a translation.
* @param id The job ID
* @param rating A rating for the translation
* @param commentsForTranslator Comments for the translator
* @param commentsForGengo Comments for Gengo
* @param feedbackIsPublic true iff the feedback can be shared publicly
* @return the response from the server
* @throws GengoException
*/
public JSONObject approveTranslationJob(int id, Rating rating,
String commentsForTranslator, String commentsForGengo,
boolean feedbackIsPublic) throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "approve");
data.put("for_translator", commentsForTranslator);
data.put("for_gengo", commentsForGengo);
data.put("public", feedbackIsPublic ? MYGENGO_TRUE : MYGENGO_FALSE);
data.put("rating", rating.toString());
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Reject a translation.
* @param id the job ID
* @param reason reason for rejection
* @param comments comments for Gengo
* @param captcha the captcha image text
* @param requeue true iff the job should be passed on to another translator
* @return the response from the server
* @throws GengoException
*/
public JSONObject rejectTranslationJob(int id, RejectReason reason,
String comments, String captcha, boolean requeue)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "reject");
data.put("reason", reason.toString().toLowerCase());
data.put("comment", comments);
data.put("captcha", captcha);
data.put("follow_up", requeue ? "requeue" : "cancel");
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a translation job
* @param id the job id
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJob(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id;
return call(url, HttpMethod.GET);
}
/**
* Get all translation jobs
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobs() throws GengoException
{
String url = baseUrl + "translate/jobs/";
return call(url, HttpMethod.GET);
}
/**
* Get selected translation jobs
* @param ids a list of job ids to retrieve
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobs(List<Integer> ids) throws GengoException
{
String url = baseUrl + "translate/jobs/";
url += join(ids, ",");
return call(url, HttpMethod.GET);
}
/**
* Get translation jobs which were previously submitted as a group
* @param groupId The group job number for these jobs.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getGroupJobs(int groupId) throws GengoException
{
String url = baseUrl + "translate/jobs/group/";
url += groupId;
return call(url, HttpMethod.GET);
}
/**
* Post a comment for a translation job
* @param id the ID of the job to comment on
* @param comment the comment
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobComment(int id, String comment)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id + "/comment";
JSONObject data = new JSONObject();
data.put("body", comment);
return call(url, HttpMethod.POST, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get comments for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobComments(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/comments/";
return call(url, HttpMethod.GET);
}
/**
* Get feedback for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobFeedback(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/feedback";
return call(url, HttpMethod.GET);
}
/**
* Get all revisions for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobRevisions(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/revisions";
return call(url, HttpMethod.GET);
}
/**
* Get a specific revision for a translation job
* @param id the job ID
* @param revisionId the ID of the revision to retrieve
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobRevision(int id, int revisionId)
throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/revision/"
+ revisionId;
return call(url, HttpMethod.GET);
}
/**
* Get the preview image for a translated job
* @param id the job ID
* @return the image from the server
* @throws GengoException
*/
public BufferedImage getTranslationJobPreviewImage(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/preview";
return getImage(url);
}
/**
* Cancel a translation job. It can only be deleted if it has not been started by a translator.
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject deleteTranslationJob(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id;
return call(url, HttpMethod.DELETE);
}
/**
* Cancel translation jobs. They can only be deleted if they have not been started by a translator.
* @param ids a list of job IDs to delete
* @return the response from the server
* @throws GengoException
*/
public JSONObject deleteTranslationJobs(List<Integer> ids) throws GengoException
{
try
{
String url = baseUrl + "translate/jobs/";
JSONObject data = new JSONObject();
data.put("job_ids", ids);
return call(url, HttpMethod.DELETE, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a list of supported languages and their language codes.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguages() throws GengoException
{
String url = baseUrl + "translate/service/languages";
return call(url, HttpMethod.GET);
}
/**
* Get a list of supported language pairs, tiers, and credit prices.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguagePairs() throws GengoException
{
String url = baseUrl + "translate/service/language_pairs";
return call(url, HttpMethod.GET);
}
/**
* Get a list of supported language pairs, tiers and credit prices for a specific source language.
* @param sourceLanguageCode the language code for the source language
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguagePairs(String sourceLanguageCode) throws GengoException
{
try
{
String url = baseUrl + "translate/service/language_pairs";
JSONObject data = new JSONObject();
data.put("lc_src", sourceLanguageCode);
return call(url, HttpMethod.GET, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a quote for translation jobs.
     * @param jobs Translation job objects to be quoted
* @return the response from the server
* @throws GengoException
*/
public JSONObject determineTranslationCost(Payloads jobs) throws GengoException
{
try
{
String url = baseUrl + "translate/service/quote/";
JSONObject data = new JSONObject();
data.put("jobs", jobs.toJSONArray());
return call(url, HttpMethod.POST, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Update translation jobs
* @param action the update action to apply
     * @param updates the job update payload objects
* @return the response from the server
* @throws GengoException
*/
private JSONObject updateTranslationJobs(String action, List<JobUpdate> updates) throws GengoException
{
try
{
String url = baseUrl + "translate/jobs";
JSONObject data = new JSONObject();
data.put("action", action);
String fieldName = null;
for (JobUpdate u : updates)
{
String g = u.isIdentifiedByJobId() ? "job_ids" : "jobs";
if (null != fieldName && !fieldName.equals(g))
{
throw new GengoException("All updates in list must be identified in the same way: either by job_id, or by (lc_src, lc_tgt, body_src, tier)");
}
else
{
fieldName = g;
}
}
/* We can safely cast into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<Payload> p = (List)updates;
data.put(fieldName, (new Payloads(p)).toJSONArray());
return call(url, HttpMethod.PUT, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Revise translations.
* @param revisions Revision payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject reviseTranslationJobs(List<Revision> revisions) throws GengoException
{
/* We can safely cast our list of revisions into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)revisions;
return updateTranslationJobs("revise", p);
}
/**
* Reject translations.
* @param rejections Rejection payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject rejectTranslationJobs(List<Rejection> rejections) throws GengoException
{
/* We can safely cast our list of rejections into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)rejections;
return updateTranslationJobs("reject", p);
}
/**
* Approve translations.
* @param approvals Approval payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject approveTranslationJobs(List<Approval> approvals) throws GengoException
{
/* We can safely cast our list of approvals into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)approvals;
return updateTranslationJobs("approve", p);
}
/**
* Get translation jobs which were previously submitted together by their order id.
*
* @param orderId
* @return the response from the server
* @throws GengoException
*/
public JSONObject getOrderJobs(int orderId) throws GengoException
{
String url = baseUrl + "translate/order/";
url += orderId;
return call(url, HttpMethod.GET);
}
/**
* Get a quote for file jobs.
* @param jobs Translation job objects to be quoted
* @param filePaths map of file keys to filesystem paths
* @return the response from the server
* @throws GengoException
*/
public JSONObject determineTranslationCostFiles(List<FileJob> jobs, Map<String, String> filePaths) throws GengoException
{
try
{
JSONObject theJobs = new JSONObject();
for (int i = 0; i < jobs.size(); i++) {
theJobs.put(String.format("job_%s", i), jobs.get(i).toJSONObject());
}
String url = baseUrl + "translate/service/quote/file";
JSONObject data = new JSONObject();
data.put("jobs", theJobs);
return httpPostFileUpload(url, data, filePaths);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
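
    /*
     * Wiring sketch (illustrative only; the file key and local path are assumptions):
     * each FileJob is serialised as "job_0", "job_1", ... and the filePaths map ties a
     * job's file key to a local file that is sent as a multipart upload.
     *
     *   Map<String, String> filePaths = new HashMap<String, String>();
     *   filePaths.put("file_01", "/path/to/source_text.txt");
     *   // List<FileJob> jobs = ...;   // each FileJob referencing key "file_01"
     *   // JSONObject quote = client.determineTranslationCostFiles(jobs, filePaths);
     */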
/**
* Utility function.
*/
private String join(Iterable<? extends Object> pColl, String separator)
{
Iterator<? extends Object> oIter;
if (pColl == null || (!(oIter = pColl.iterator()).hasNext()))
{
return "";
}
StringBuffer oBuilder = new StringBuffer(String.valueOf(oIter.next()));
while (oIter.hasNext())
{
oBuilder.append(separator).append(oIter.next());
}
return oBuilder.toString();
}
}
| src/main/java/com/gengo/client/GengoClient.java | package com.gengo.client;
import java.awt.image.BufferedImage;
import java.util.Iterator;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.gengo.client.enums.HttpMethod;
import com.gengo.client.enums.Rating;
import com.gengo.client.enums.RejectReason;
import com.gengo.client.exceptions.GengoException;
import com.gengo.client.payloads.Approval;
import com.gengo.client.payloads.JobUpdate;
import com.gengo.client.payloads.Payload;
import com.gengo.client.payloads.Rejection;
import com.gengo.client.payloads.Revision;
import com.gengo.client.payloads.TranslationJob;
import com.gengo.client.payloads.Payloads;
/**
* A Java client for the Gengo.com translation API.
* This client depends on the JSON in Java library available at:
* http://json.org/java/
*/
public class GengoClient extends JsonHttpApi
{
private static final String STANDARD_BASE_URL = "http://api.gengo.com/v2/";
private static final String SANDBOX_BASE_URL = "http://api.sandbox.gengo.com/v2/";
/** Strings used to represent TRUE and FALSE in requests */
public static final String MYGENGO_TRUE = "1";
public static final String MYGENGO_FALSE = "0";
private String baseUrl = STANDARD_BASE_URL;
/**
* Initialize the client.
* @param publicKey your Gengo.com public API key
* @param privateKey your Gengo.com private API key
*/
public GengoClient(String publicKey, String privateKey)
{
this(publicKey, privateKey, false);
}
/**
* Initialize the client with the option to use the sandbox.
* @param publicKey your Gengo.com public API key
* @param privateKey your Gengo.com private API key
* @param useSandbox true to use the sandbox, false to use the live service
*/
public GengoClient(String publicKey, String privateKey, boolean useSandbox)
{
super(publicKey, privateKey);
setUseSandbox(useSandbox);
}
/**
* @return true iff the client is using the sandbox
*/
public boolean getUseSandbox()
{
return SANDBOX_BASE_URL.equals(baseUrl);
}
/**
* Set the client to use the sandbox or the live service.
* @param use true iff the client should use the sandbox
*/
public void setUseSandbox(boolean use)
{
baseUrl = use ? SANDBOX_BASE_URL : STANDARD_BASE_URL;
}
/**
* Set a custom base URL. For development testing purposes only.
* @param baseUrl a custom API base URL
*/
public void setBaseUrl(String baseUrl)
{
this.baseUrl = baseUrl;
}
/**
* Get account statistics.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getAccountStats() throws GengoException
{
String url = baseUrl + "account/stats";
return call(url, HttpMethod.GET);
}
/**
* Get account balance.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getAccountBalance() throws GengoException
{
String url = baseUrl + "account/balance";
return call(url, HttpMethod.GET);
}
/**
* Submit a job for translation.
* @param job a job payload object
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJob(TranslationJob job)
throws GengoException
{
try
{
String url = baseUrl + "translate/job";
JSONObject data = new JSONObject();
data.put("job", job.toJSONObject());
return call(url, HttpMethod.POST, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Submit multiple jobs for translation.
* @param jobs TranslationJob payload objects
* @param processAsGroup true iff the jobs should be processed as a group
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobs(List<TranslationJob> jobs, boolean processAsGroup)
throws GengoException
{
return postTranslationJobs(jobs, processAsGroup, false);
}
/**
* Submit multiple jobs for translation.
* @param jobs TranslationJob payload objects
* @param processAsGroup true iff the jobs should be processed as a group
* @param fixBadResponse true iff malformed job responses should be fixed
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobs(List<TranslationJob> jobs, boolean processAsGroup, boolean fixBadResponse)
throws GengoException
{
try
{
String url = baseUrl + "translate/jobs";
JSONObject data = new JSONObject();
/* We can safely cast our list of jobs into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<Payload> p = (List)jobs;
data.put("jobs", (new Payloads(p)).toJSONArray());
data.put("as_group", processAsGroup ? MYGENGO_TRUE : MYGENGO_FALSE);
JSONObject rsp = call(url, HttpMethod.POST, data);
return fixBadResponse ? fixBadPostJobsResponse(rsp) : rsp;
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/* Temporary workaround for a server side issue with this API method */
private JSONObject fixBadPostJobsResponse(JSONObject rsp)
{
try
{
if ("ok".equals(rsp.getString("opstat")))
{
JSONObject innerRsp = rsp.getJSONObject("response");
JSONArray jobs = innerRsp.getJSONArray("jobs");
if (jobs.length() > 0)
{
JSONObject first = jobs.getJSONArray(0).getJSONObject(0);
JSONObject newFirst = new JSONObject();
newFirst.put("0", first);
jobs.put(0, newFirst);
innerRsp.put("jobs", jobs);
rsp.put("response", innerRsp);
}
}
}
catch (JSONException e)
{
// not a response we can or need to fix
}
return rsp;
}
/**
* Request revisions for a job.
* @param id The job ID
* @param comments Comments for the translator
* @return the response from the server
* @throws GengoException
*/
public JSONObject reviseTranslationJob(int id, String comments)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "revise");
data.put("comment", comments);
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Approve a translation.
* @param id The job ID
* @param rating A rating for the translation
* @param commentsForTranslator Comments for the translator
* @param commentsForGengo Comments for Gengo
* @param feedbackIsPublic true iff the feedback can be shared publicly
* @return the response from the server
* @throws GengoException
*/
public JSONObject approveTranslationJob(int id, Rating rating,
String commentsForTranslator, String commentsForGengo,
boolean feedbackIsPublic) throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "approve");
data.put("for_translator", commentsForTranslator);
data.put("for_gengo", commentsForGengo);
data.put("public", feedbackIsPublic ? MYGENGO_TRUE : MYGENGO_FALSE);
data.put("rating", rating.toString());
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Reject a translation.
* @param id the job ID
* @param reason reason for rejection
* @param comments comments for Gengo
* @param captcha the captcha image text
* @param requeue true iff the job should be passed on to another translator
* @return the response from the server
* @throws GengoException
*/
public JSONObject rejectTranslationJob(int id, RejectReason reason,
String comments, String captcha, boolean requeue)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id;
JSONObject data = new JSONObject();
data.put("action", "reject");
data.put("reason", reason.toString().toLowerCase());
data.put("comment", comments);
data.put("captcha", captcha);
data.put("follow_up", requeue ? "requeue" : "cancel");
return call(url, HttpMethod.PUT, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a translation job
* @param id the job id
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJob(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id;
return call(url, HttpMethod.GET);
}
/**
* Get all translation jobs
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobs() throws GengoException
{
String url = baseUrl + "translate/jobs/";
return call(url, HttpMethod.GET);
}
/**
* Get selected translation jobs
* @param ids a list of job ids to retrieve
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobs(List<Integer> ids) throws GengoException
{
String url = baseUrl + "translate/jobs/";
url += join(ids, ",");
return call(url, HttpMethod.GET);
}
/**
* Get translation jobs which were previously submitted as a group
* @param groupId The group job number for these jobs.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getGroupJobs(int groupId) throws GengoException
{
String url = baseUrl + "translate/jobs/group/";
url += groupId;
return call(url, HttpMethod.GET);
}
/**
* Post a comment for a translation job
* @param id the ID of the job to comment on
* @param comment the comment
* @return the response from the server
* @throws GengoException
*/
public JSONObject postTranslationJobComment(int id, String comment)
throws GengoException
{
try
{
String url = baseUrl + "translate/job/" + id + "/comment";
JSONObject data = new JSONObject();
data.put("body", comment);
return call(url, HttpMethod.POST, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get comments for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobComments(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/comments/";
return call(url, HttpMethod.GET);
}
/**
* Get feedback for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobFeedback(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/feedback";
return call(url, HttpMethod.GET);
}
/**
* Get all revisions for a translation job
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobRevisions(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/revisions";
return call(url, HttpMethod.GET);
}
/**
* Get a specific revision for a translation job
* @param id the job ID
* @param revisionId the ID of the revision to retrieve
* @return the response from the server
* @throws GengoException
*/
public JSONObject getTranslationJobRevision(int id, int revisionId)
throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/revision/"
+ revisionId;
return call(url, HttpMethod.GET);
}
/**
* Get the preview image for a translated job
* @param id the job ID
* @return the image from the server
* @throws GengoException
*/
public BufferedImage getTranslationJobPreviewImage(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id + "/preview";
return getImage(url);
}
/**
* Cancel a translation job. It can only be deleted if it has not been started by a translator.
* @param id the job ID
* @return the response from the server
* @throws GengoException
*/
public JSONObject deleteTranslationJob(int id) throws GengoException
{
String url = baseUrl + "translate/job/" + id;
return call(url, HttpMethod.DELETE);
}
/**
* Cancel translation jobs. They can only be deleted if they have not been started by a translator.
* @param ids a list of job IDs to delete
* @return the response from the server
* @throws GengoException
*/
public JSONObject deleteTranslationJobs(List<Integer> ids) throws GengoException
{
try
{
String url = baseUrl + "translate/jobs/";
JSONObject data = new JSONObject();
data.put("job_ids", ids);
return call(url, HttpMethod.DELETE, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a list of supported languages and their language codes.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguages() throws GengoException
{
String url = baseUrl + "translate/service/languages";
return call(url, HttpMethod.GET);
}
/**
* Get a list of supported language pairs, tiers, and credit prices.
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguagePairs() throws GengoException
{
String url = baseUrl + "translate/service/language_pairs";
return call(url, HttpMethod.GET);
}
/**
* Get a list of supported language pairs, tiers and credit prices for a specific source language.
* @param sourceLanguageCode the language code for the source language
* @return the response from the server
* @throws GengoException
*/
public JSONObject getServiceLanguagePairs(String sourceLanguageCode) throws GengoException
{
try
{
String url = baseUrl + "translate/service/language_pairs";
JSONObject data = new JSONObject();
data.put("lc_src", sourceLanguageCode);
return call(url, HttpMethod.GET, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Get a quote for translation jobs.
     * @param jobs Translation job objects to be quoted
* @return the response from the server
* @throws GengoException
*/
public JSONObject determineTranslationCost(Payloads jobs) throws GengoException
{
try
{
String url = baseUrl + "translate/service/quote/";
JSONObject data = new JSONObject();
data.put("jobs", jobs.toJSONArray());
return call(url, HttpMethod.POST, data);
} catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Update translation jobs
* @param action the update action to apply
     * @param updates the job update payload objects
* @return the response from the server
* @throws GengoException
*/
private JSONObject updateTranslationJobs(String action, List<JobUpdate> updates) throws GengoException
{
try
{
String url = baseUrl + "translate/jobs";
JSONObject data = new JSONObject();
data.put("action", action);
String fieldName = null;
for (JobUpdate u : updates)
{
String g = u.isIdentifiedByJobId() ? "job_ids" : "jobs";
if (null != fieldName && !fieldName.equals(g))
{
throw new GengoException("All updates in list must be identified in the same way: either by job_id, or by (lc_src, lc_tgt, body_src, tier)");
}
else
{
fieldName = g;
}
}
/* We can safely cast into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<Payload> p = (List)updates;
data.put(fieldName, (new Payloads(p)).toJSONArray());
return call(url, HttpMethod.PUT, data);
}
catch (JSONException x)
{
throw new GengoException(x.getMessage(), x);
}
}
/**
* Revise translations.
* @param revisions Revision payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject reviseTranslationJobs(List<Revision> revisions) throws GengoException
{
/* We can safely cast our list of revisions into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)revisions;
return updateTranslationJobs("revise", p);
}
/**
* Reject translations.
* @param rejections Rejection payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject rejectTranslationJobs(List<Rejection> rejections) throws GengoException
{
/* We can safely cast our list of rejections into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)rejections;
return updateTranslationJobs("reject", p);
}
/**
* Approve translations.
* @param approvals Approval payload objects
* @return the response from the server
* @throws GengoException
*/
public JSONObject approveTranslationJobs(List<Approval> approvals) throws GengoException
{
/* We can safely cast our list of approvals into a list of the payload base type */
@SuppressWarnings({ "rawtypes", "unchecked" })
List<JobUpdate> p = (List)approvals;
return updateTranslationJobs("approve", p);
}
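    /*
     * Usage sketch (added for illustration, not in the original source): the three
     * bulk methods above all delegate to updateTranslationJobs, so every payload in
     * a single call must be identified the same way - either all by job_id, or all
     * by the (lc_src, lc_tgt, body_src, tier) tuple - otherwise a GengoException is
     * thrown. Assuming "client" is a GengoClient instance:
     *
     *   List<Approval> approvals = ...; // Approval payloads, all identified by job_id
     *   JSONObject response = client.approveTranslationJobs(approvals);
     */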
/**
* Get translation jobs which were previously submitted together by their order id.
*
     * @param orderId the order ID of the previously submitted jobs
* @return the response from the server
* @throws GengoException
*/
public JSONObject getOrderJobs(int orderId) throws GengoException
{
String url = baseUrl + "translate/order/";
url += orderId;
return call(url, HttpMethod.GET);
}
/**
     * Utility function: joins the string values of the given elements with the given separator.
*/
private String join(Iterable<? extends Object> pColl, String separator)
{
Iterator<? extends Object> oIter;
if (pColl == null || (!(oIter = pColl.iterator()).hasNext()))
{
return "";
}
StringBuffer oBuilder = new StringBuffer(String.valueOf(oIter.next()));
while (oIter.hasNext())
{
oBuilder.append(separator).append(oIter.next());
}
return oBuilder.toString();
}
}
| add determineTranslationCostFiles
| src/main/java/com/gengo/client/GengoClient.java | add determineTranslationCostFiles | <ide><path>rc/main/java/com/gengo/client/GengoClient.java
<ide> import java.awt.image.BufferedImage;
<ide> import java.util.Iterator;
<ide> import java.util.List;
<add>import java.util.Map;
<ide>
<ide> import org.json.JSONArray;
<ide> import org.json.JSONException;
<ide> import com.gengo.client.enums.RejectReason;
<ide> import com.gengo.client.exceptions.GengoException;
<ide> import com.gengo.client.payloads.Approval;
<add>import com.gengo.client.payloads.FileJob;
<ide> import com.gengo.client.payloads.JobUpdate;
<ide> import com.gengo.client.payloads.Payload;
<ide> import com.gengo.client.payloads.Rejection;
<ide> return call(url, HttpMethod.GET);
<ide> }
<ide>
<add> /**
<add> * Get a quote for file jobs.
<add> * @param jobs Translation job objects to be quoted
<add> * @param filePaths map of file keys to filesystem paths
<add> * @return the response from the server
<add> * @throws GengoException
<add> */
<add> public JSONObject determineTranslationCostFiles(List<FileJob> jobs, Map<String, String> filePaths) throws GengoException
<add> {
<add> try
<add> {
<add> JSONObject theJobs = new JSONObject();
<add>
<add> for (int i = 0; i < jobs.size(); i++) {
<add> theJobs.put(String.format("job_%s", i), jobs.get(i).toJSONObject());
<add> }
<add> String url = baseUrl + "translate/service/quote/file";
<add> JSONObject data = new JSONObject();
<add> data.put("jobs", theJobs);
<add>
<add> return httpPostFileUpload(url, data, filePaths);
<add> } catch (JSONException x)
<add> {
<add> throw new GengoException(x.getMessage(), x);
<add> }
<add> }
<ide>
<ide> /**
<ide> * Utility function. |
|
Java | lgpl-2.1 | 9f15b5a409470a1ed7d72778df6ffa8cf614d849 | 0 | vigna/Sux4J,vigna/Sux4J,vigna/Sux4J,vigna/Sux4J | package it.unimi.dsi.sux4j.mph;
/*
* Sux4J: Succinct data structures for Java
*
* Copyright (C) 2008 Sebastiano Vigna
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
import it.unimi.dsi.Util;
import it.unimi.dsi.bits.BitVector;
import it.unimi.dsi.bits.Fast;
import it.unimi.dsi.bits.HuTuckerTransformationStrategy;
import it.unimi.dsi.bits.TransformationStrategies;
import it.unimi.dsi.bits.TransformationStrategy;
import it.unimi.dsi.fastutil.io.BinIO;
import it.unimi.dsi.fastutil.longs.AbstractLongList;
import it.unimi.dsi.io.FastBufferedReader;
import it.unimi.dsi.io.FileLinesCollection;
import it.unimi.dsi.io.LineIterator;
import it.unimi.dsi.lang.MutableString;
import it.unimi.dsi.logging.ProgressLogger;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.zip.GZIPInputStream;
import org.apache.log4j.Logger;
import com.martiansoftware.jsap.FlaggedOption;
import com.martiansoftware.jsap.JSAP;
import com.martiansoftware.jsap.JSAPException;
import com.martiansoftware.jsap.JSAPResult;
import com.martiansoftware.jsap.Parameter;
import com.martiansoftware.jsap.SimpleJSAP;
import com.martiansoftware.jsap.Switch;
import com.martiansoftware.jsap.UnflaggedOption;
import com.martiansoftware.jsap.stringparsers.ForNameStringParser;
/** A monotone minimal perfect hash implementation based on fixed-size bucketing that uses
* a {@linkplain RelativeTrieDistributor relative trie} as a distributor.
*
*/
public class RelativeTrieMonotoneMinimalPerfectHashFunction<T> extends AbstractHashFunction<T> implements Serializable {
public static final long serialVersionUID = 1L;
private static final Logger LOGGER = Util.getLogger( RelativeTrieMonotoneMinimalPerfectHashFunction.class );
/** The number of elements. */
private final int size;
/** The size of a bucket. */
private final int bucketSize;
/** {@link Fast#ceilLog2(int)} of {@link #bucketSize}. */
private final int log2BucketSize;
/** The transformation strategy. */
private final TransformationStrategy<? super T> transform;
/** A hollow trie distributor assigning keys to buckets. */
private final RelativeTrieDistributor<BitVector> distributor;
/** The offset of each element into his bucket. */
private final MWHCFunction<BitVector> offset;
@SuppressWarnings("unchecked")
public long getLong( final Object o ) {
if ( size == 0 ) return -1;
final BitVector bv = transform.toBitVector( (T)o ).fast();
final long bucket = distributor.getLong( bv );
return ( bucket << log2BucketSize ) + offset.getLong( bv );
}
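	/* Worked example (added for illustration, not part of the original source): with
	 * log2BucketSize = 8 the bucket size is 256, so a key that the distributor places
	 * in bucket 3 and whose offset function returns 17 gets rank (3 << 8) + 17 = 785. */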
/** Creates a new hollow-trie-based monotone minimal perfect hash function using the given
* elements and transformation strategy.
*
* @param elements the elements among which the trie must be able to rank.
* @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
* distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
*/
public RelativeTrieMonotoneMinimalPerfectHashFunction( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform ) {
this( elements, transform, -1 );
}
/** Creates a new hollow-trie-based monotone minimal perfect hash function using the given
* elements, transformation strategy and bucket size.
*
* <p>This constructor is mainly for debugging and testing purposes.
*
* @param elements the elements among which the trie must be able to rank.
* @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
* distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
* @param log2BucketSize the logarithm of the bucket size.
*/
public RelativeTrieMonotoneMinimalPerfectHashFunction( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform, int log2BucketSize ) {
this.transform = transform;
long maxLength = 0;
long totalLength = 0;
int c = 0;
BitVector bv;
for( T s: elements ) {
bv = transform.toBitVector( s );
maxLength = Math.max( maxLength, bv.length() );
totalLength += bv.length();
c++;
}
size = c;
if ( size == 0 ) {
bucketSize = this.log2BucketSize = 0;
distributor = null;
offset = null;
return;
}
final long averageLength = ( totalLength + size - 1 ) / size;
this.log2BucketSize = log2BucketSize == -1 ? Fast.mostSignificantBit( 16 + 7 * Fast.ceilLog2( averageLength ) + Fast.ceilLog2( Fast.ceilLog2( averageLength ) ) ) : log2BucketSize;
bucketSize = 1 << this.log2BucketSize;
final Iterable<BitVector> bitVectors = TransformationStrategies.wrap( elements, transform );
LOGGER.debug( "Average length: " + averageLength );
LOGGER.debug( "Bucket size: " + bucketSize );
distributor = new RelativeTrieDistributor<BitVector>( bitVectors, bucketSize, TransformationStrategies.identity() );
offset = new MWHCFunction<BitVector>( bitVectors, TransformationStrategies.identity(), new AbstractLongList() {
public long getLong( int index ) {
return index % bucketSize;
}
public int size() {
return size;
}
}, log2BucketSize );
LOGGER.debug( "Actual bit cost per element: " + (double)numBits() / size );
}
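	/* Worked example of the default bucket-size heuristic above (added for
	 * illustration; assumes Fast.ceilLog2 returns the ceiling of the base-2 logarithm
	 * and Fast.mostSignificantBit returns the index of the highest set bit): for an
	 * average key length of 1024 bits, ceilLog2(1024) = 10, so the argument is
	 * 16 + 7 * 10 + ceilLog2(10) = 16 + 70 + 4 = 90, whose most significant bit is 6,
	 * giving a bucket size of 1 << 6 = 64. */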
public int size() {
return size;
}
public long numBits() {
return distributor.numBits() + offset.numBits() + transform.numBits();
}
public static void main( final String[] arg ) throws NoSuchMethodException, IOException, JSAPException {
		final SimpleJSAP jsap = new SimpleJSAP( RelativeTrieMonotoneMinimalPerfectHashFunction.class.getName(), "Builds a PaCo trie-based monotone minimal perfect hash function reading a newline-separated list of strings.",
new Parameter[] {
new FlaggedOption( "encoding", ForNameStringParser.getParser( Charset.class ), "UTF-8", JSAP.NOT_REQUIRED, 'e', "encoding", "The string file encoding." ),
new Switch( "huTucker", 'h', "hu-tucker", "Use Hu-Tucker coding to reduce string length." ),
new Switch( "iso", 'i', "iso", "Use ISO-8859-1 coding (i.e., just use the lower eight bits of each character)." ),
new Switch( "zipped", 'z', "zipped", "The string list is compressed in gzip format." ),
new UnflaggedOption( "function", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, JSAP.NOT_GREEDY, "The filename for the serialised monotone minimal perfect hash function." ),
new UnflaggedOption( "stringFile", JSAP.STRING_PARSER, "-", JSAP.NOT_REQUIRED, JSAP.NOT_GREEDY, "The name of a file containing a newline-separated list of strings, or - for standard input; in the first case, strings will not be loaded into core memory." ),
});
JSAPResult jsapResult = jsap.parse( arg );
if ( jsap.messagePrinted() ) return;
final String functionName = jsapResult.getString( "function" );
final String stringFile = jsapResult.getString( "stringFile" );
final Charset encoding = (Charset)jsapResult.getObject( "encoding" );
final boolean zipped = jsapResult.getBoolean( "zipped" );
final boolean iso = jsapResult.getBoolean( "iso" );
final boolean huTucker = jsapResult.getBoolean( "huTucker" );
final Collection<MutableString> collection;
if ( "-".equals( stringFile ) ) {
final ProgressLogger pl = new ProgressLogger( LOGGER );
pl.start( "Loading strings..." );
collection = new LineIterator( new FastBufferedReader( new InputStreamReader( zipped ? new GZIPInputStream( System.in ) : System.in, encoding ) ), pl ).allLines();
pl.done();
}
else collection = new FileLinesCollection( stringFile, encoding.toString(), zipped );
final TransformationStrategy<CharSequence> transformationStrategy = huTucker
? new HuTuckerTransformationStrategy( collection, true )
: iso
? TransformationStrategies.prefixFreeIso()
: TransformationStrategies.prefixFreeUtf16();
BinIO.storeObject( new RelativeTrieMonotoneMinimalPerfectHashFunction<CharSequence>( collection, transformationStrategy ), functionName );
LOGGER.info( "Completed." );
}
}
| src/it/unimi/dsi/sux4j/mph/RelativeTrieMonotoneMinimalPerfectHashFunction.java | package it.unimi.dsi.sux4j.mph;
/*
* Sux4J: Succinct data structures for Java
*
* Copyright (C) 2008 Sebastiano Vigna
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
import it.unimi.dsi.Util;
import it.unimi.dsi.bits.BitVector;
import it.unimi.dsi.bits.Fast;
import it.unimi.dsi.bits.HuTuckerTransformationStrategy;
import it.unimi.dsi.bits.TransformationStrategies;
import it.unimi.dsi.bits.TransformationStrategy;
import it.unimi.dsi.fastutil.io.BinIO;
import it.unimi.dsi.fastutil.longs.AbstractLongList;
import it.unimi.dsi.io.FastBufferedReader;
import it.unimi.dsi.io.FileLinesCollection;
import it.unimi.dsi.io.LineIterator;
import it.unimi.dsi.lang.MutableString;
import it.unimi.dsi.logging.ProgressLogger;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.zip.GZIPInputStream;
import org.apache.log4j.Logger;
import com.martiansoftware.jsap.FlaggedOption;
import com.martiansoftware.jsap.JSAP;
import com.martiansoftware.jsap.JSAPException;
import com.martiansoftware.jsap.JSAPResult;
import com.martiansoftware.jsap.Parameter;
import com.martiansoftware.jsap.SimpleJSAP;
import com.martiansoftware.jsap.Switch;
import com.martiansoftware.jsap.UnflaggedOption;
import com.martiansoftware.jsap.stringparsers.ForNameStringParser;
/** A monotone minimal perfect hash implementation based on fixed-size bucketing that uses
* a {@linkplain RelativeTrieDistributor relative trie} as a distributor.
*
*/
public class RelativeTrieMonotoneMinimalPerfectHashFunction<T> extends AbstractHashFunction<T> implements Serializable {
public static final long serialVersionUID = 1L;
private static final Logger LOGGER = Util.getLogger( RelativeTrieMonotoneMinimalPerfectHashFunction.class );
/** The number of elements. */
private final int size;
/** The size of a bucket. */
private final int bucketSize;
/** {@link Fast#ceilLog2(int)} of {@link #bucketSize}. */
private final int log2BucketSize;
/** The transformation strategy. */
private final TransformationStrategy<? super T> transform;
/** A hollow trie distributor assigning keys to buckets. */
private final RelativeTrieDistributor<BitVector> distributor;
/** The offset of each element into his bucket. */
private final MWHCFunction<BitVector> offset;
@SuppressWarnings("unchecked")
public long getLong( final Object o ) {
if ( size == 0 ) return -1;
final BitVector bv = transform.toBitVector( (T)o ).fast();
final long bucket = distributor.getLong( bv );
return ( bucket << log2BucketSize ) + offset.getLong( bv );
}
/** Creates a new hollow-trie-based monotone minimal perfect hash function using the given
* elements and transformation strategy.
*
* @param elements the elements among which the trie must be able to rank.
* @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
* distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
*/
public RelativeTrieMonotoneMinimalPerfectHashFunction( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform ) {
this( elements, transform, -1 );
}
/** Creates a new hollow-trie-based monotone minimal perfect hash function using the given
* elements, transformation strategy and bucket size.
*
* <p>This constructor is mainly for debugging and testing purposes.
*
* @param elements the elements among which the trie must be able to rank.
* @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
* distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
* @param log2BucketSize the logarithm of the bucket size.
*/
public RelativeTrieMonotoneMinimalPerfectHashFunction( final Iterable<? extends T> elements, final TransformationStrategy<? super T> transform, int log2BucketSize ) {
this.transform = transform;
long maxLength = 0;
long totalLength = 0;
int c = 0;
BitVector bv;
for( T s: elements ) {
bv = transform.toBitVector( s );
maxLength = Math.max( maxLength, bv.length() );
totalLength += bv.length();
c++;
}
size = c;
if ( size == 0 ) {
bucketSize = this.log2BucketSize = 0;
distributor = null;
offset = null;
return;
}
final long averageLength = ( totalLength + size - 1 ) / size;
this.log2BucketSize = log2BucketSize == -1 ? Fast.mostSignificantBit( 16 + 7 * Fast.ceilLog2( averageLength ) + Fast.ceilLog2( Fast.ceilLog2( averageLength ) ) ) : log2BucketSize;
bucketSize = 1 << log2BucketSize;
final Iterable<BitVector> bitVectors = TransformationStrategies.wrap( elements, transform );
LOGGER.debug( "Average length: " + averageLength );
LOGGER.debug( "Bucket size: " + bucketSize );
distributor = new RelativeTrieDistributor<BitVector>( bitVectors, bucketSize, TransformationStrategies.identity() );
offset = new MWHCFunction<BitVector>( bitVectors, TransformationStrategies.identity(), new AbstractLongList() {
public long getLong( int index ) {
return index % bucketSize;
}
public int size() {
return size;
}
}, log2BucketSize );
LOGGER.debug( "Actual bit cost per element: " + (double)numBits() / size );
}
public int size() {
return size;
}
public long numBits() {
return distributor.numBits() + offset.numBits() + transform.numBits();
}
public static void main( final String[] arg ) throws NoSuchMethodException, IOException, JSAPException {
		final SimpleJSAP jsap = new SimpleJSAP( RelativeTrieMonotoneMinimalPerfectHashFunction.class.getName(), "Builds a PaCo trie-based monotone minimal perfect hash function reading a newline-separated list of strings.",
new Parameter[] {
new FlaggedOption( "encoding", ForNameStringParser.getParser( Charset.class ), "UTF-8", JSAP.NOT_REQUIRED, 'e', "encoding", "The string file encoding." ),
new Switch( "huTucker", 'h', "hu-tucker", "Use Hu-Tucker coding to reduce string length." ),
new Switch( "iso", 'i', "iso", "Use ISO-8859-1 coding (i.e., just use the lower eight bits of each character)." ),
new Switch( "zipped", 'z', "zipped", "The string list is compressed in gzip format." ),
new UnflaggedOption( "function", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.REQUIRED, JSAP.NOT_GREEDY, "The filename for the serialised monotone minimal perfect hash function." ),
new UnflaggedOption( "stringFile", JSAP.STRING_PARSER, "-", JSAP.NOT_REQUIRED, JSAP.NOT_GREEDY, "The name of a file containing a newline-separated list of strings, or - for standard input; in the first case, strings will not be loaded into core memory." ),
});
JSAPResult jsapResult = jsap.parse( arg );
if ( jsap.messagePrinted() ) return;
final String functionName = jsapResult.getString( "function" );
final String stringFile = jsapResult.getString( "stringFile" );
final Charset encoding = (Charset)jsapResult.getObject( "encoding" );
final boolean zipped = jsapResult.getBoolean( "zipped" );
final boolean iso = jsapResult.getBoolean( "iso" );
final boolean huTucker = jsapResult.getBoolean( "huTucker" );
final Collection<MutableString> collection;
if ( "-".equals( stringFile ) ) {
final ProgressLogger pl = new ProgressLogger( LOGGER );
pl.start( "Loading strings..." );
collection = new LineIterator( new FastBufferedReader( new InputStreamReader( zipped ? new GZIPInputStream( System.in ) : System.in, encoding ) ), pl ).allLines();
pl.done();
}
else collection = new FileLinesCollection( stringFile, encoding.toString(), zipped );
final TransformationStrategy<CharSequence> transformationStrategy = huTucker
? new HuTuckerTransformationStrategy( collection, true )
: iso
? TransformationStrategies.prefixFreeIso()
: TransformationStrategies.prefixFreeUtf16();
BinIO.storeObject( new RelativeTrieMonotoneMinimalPerfectHashFunction<CharSequence>( collection, transformationStrategy ), functionName );
LOGGER.info( "Completed." );
}
}
| Tweaking
| src/it/unimi/dsi/sux4j/mph/RelativeTrieMonotoneMinimalPerfectHashFunction.java | Tweaking | <ide><path>rc/it/unimi/dsi/sux4j/mph/RelativeTrieMonotoneMinimalPerfectHashFunction.java
<ide> final long averageLength = ( totalLength + size - 1 ) / size;
<ide>
<ide> this.log2BucketSize = log2BucketSize == -1 ? Fast.mostSignificantBit( 16 + 7 * Fast.ceilLog2( averageLength ) + Fast.ceilLog2( Fast.ceilLog2( averageLength ) ) ) : log2BucketSize;
<del> bucketSize = 1 << log2BucketSize;
<add> bucketSize = 1 << this.log2BucketSize;
<ide>
<ide> final Iterable<BitVector> bitVectors = TransformationStrategies.wrap( elements, transform );
<ide> LOGGER.debug( "Average length: " + averageLength ); |
|
Java | mit | 6b9abd3c389675fbb462ca2412a1259cd628746e | 0 | JPMoresmau/sqlg,pietermartin/sqlg,JPMoresmau/sqlg,pietermartin/sqlg,pietermartin/sqlg,JPMoresmau/sqlg,pietermartin/sqlg | package org.umlg.sqlg.structure;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.tinkerpop.gremlin.process.computer.GraphComputer;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.*;
import org.apache.tinkerpop.gremlin.structure.io.Io;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
import org.apache.tinkerpop.gremlin.structure.util.FeatureDescriptor;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.umlg.sqlg.sql.dialect.SqlDialect;
import org.umlg.sqlg.sql.parse.GremlinParser;
import org.umlg.sqlg.strategy.SqlgGraphStepStrategy;
import org.umlg.sqlg.strategy.SqlgVertexStepStrategy;
import org.umlg.sqlg.strategy.TopologyStrategy;
import org.umlg.sqlg.util.SqlgUtil;
import java.lang.reflect.Constructor;
import java.sql.*;
import java.util.*;
import java.util.stream.Stream;
/**
* Date: 2014/07/12
* Time: 5:38 AM
*/
@Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_PERFORMANCE)
@Graph.OptIn(Graph.OptIn.SUITE_PROCESS_PERFORMANCE)
@Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_PROCESS_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_PROCESS_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_INTEGRATE)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_PERFORMANCE)
//These are to debug travis
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldReadWriteModernToFileWithHelpers",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldReadWriteClassic",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldReadWriteModern",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldReadWriteClassicToFileWithHelpers",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldMigrateModernGraph",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
method = "shouldMigrateClassicGraph",
reason = "travis hangs.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.ExplainTest$Traversals",
method = "g_V_outE_identity_inV_explain",
reason = "Assertions assume that the strategies are in a particular order.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.filter.HasTest$Traversals",
method = "g_V_hasId_compilationEquality",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "modern_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "grateful_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "g_V_repeat_both_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "grateful_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "g_V_repeat_both_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "modern_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "testProfileStrategyCallback",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "testProfileStrategyCallbackSideEffect",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "modern_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "grateful_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "g_V_repeat_both_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "grateful_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "g_V_repeat_both_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "modern_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "testProfileStrategyCallback",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "testProfileStrategyCallbackSideEffect",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.SerializationTest$GraphSONTest",
method = "shouldSerializeTraversalMetrics",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX3X_count",
reason = "Takes too long, and too much memory at present.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_both_both_count",
reason = "Travis times out.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX5X_asXaX_outXwrittenByX_asXbX_selectXa_bX_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX8X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX3X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX8X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX5X_asXaX_outXwrittenByX_asXbX_selectXa_bX_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest$Traversals",
method = "g_V_repeatXbothX_timesX10X_asXaX_out_asXbX_selectXa_bX",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutorPerformanceTest",
method = "executorEval",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.GraphTest",
method = "shouldHaveStandardStringRepresentation",
reason = "SQLGGRAPH INCLUDES THE JDBC CONNECTION URL.")
public class SqlgGraph implements Graph {
public static final String JDBC_URL = "jdbc.url";
public static final String MODE_FOR_STREAM_VERTEX = " mode for streamVertex";
public static final String TRANSACTION_MUST_BE_IN = "Transaction must be in ";
private final SqlgDataSource sqlgDataSource;
private Logger logger = LoggerFactory.getLogger(SqlgGraph.class.getName());
private final SqlgTransaction sqlgTransaction;
private SchemaManager schemaManager;
private GremlinParser gremlinParser;
private SqlDialect sqlDialect;
private String jdbcUrl;
private ObjectMapper mapper = new ObjectMapper();
private boolean implementForeignKeys;
private Configuration configuration = new BaseConfiguration();
private final ISqlGFeatures features = new SqlGFeatures();
static {
TraversalStrategies.GlobalCache.registerStrategies(Graph.class, TraversalStrategies.GlobalCache.getStrategies(Graph.class)
.addStrategies(new SqlgVertexStepStrategy())
.addStrategies(new SqlgGraphStepStrategy())
.addStrategies(TopologyStrategy.build().create()));
}
public static <G extends Graph> G open(final Configuration configuration) {
if (null == configuration) throw Graph.Exceptions.argumentCanNotBeNull("configuration");
if (!configuration.containsKey(JDBC_URL))
throw new IllegalArgumentException(String.format("SqlgGraph configuration requires that the %s be set", JDBC_URL));
SqlgGraph sqlgGraph = new SqlgGraph(configuration);
sqlgGraph.schemaManager.loadSchema();
return (G) sqlgGraph;
}
public static <G extends Graph> G open(final String pathToSqlgProperties) {
if (null == pathToSqlgProperties) throw Graph.Exceptions.argumentCanNotBeNull("pathToSqlgProperties");
Configuration configuration;
try {
configuration = new PropertiesConfiguration(pathToSqlgProperties);
} catch (ConfigurationException e) {
throw new RuntimeException(e);
}
return open(configuration);
}
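    /*
     * Illustrative usage (an added sketch, not part of the original source). The only
     * property these open(...) methods themselves insist on is "jdbc.url"; any further
     * connection properties depend on the dialect and data source and are assumptions:
     *
     *   Configuration conf = new BaseConfiguration();
     *   conf.setProperty(SqlgGraph.JDBC_URL, "jdbc:postgresql://localhost:5432/mydb");
     *   SqlgGraph graph = SqlgGraph.open(conf);
     */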
private SqlgGraph(final Configuration configuration) {
try {
Class<?> sqlDialectClass = findSqlgDialect();
logger.debug(String.format("Initializing Sqlg with %s dialect", sqlDialectClass.getSimpleName()));
Constructor<?> constructor = sqlDialectClass.getConstructor(Configuration.class);
this.sqlDialect = (SqlDialect) constructor.newInstance(configuration);
this.implementForeignKeys = configuration.getBoolean("implement.foreign.keys", true);
this.configuration = configuration;
} catch (Exception e) {
throw new RuntimeException(e);
}
try {
this.jdbcUrl = this.configuration.getString(JDBC_URL);
this.sqlgDataSource = SqlgDataSource.setupDataSource(
sqlDialect.getJdbcDriver(),
this.configuration
);
logger.info(String.format("Connection url = %s , maxPoolSize = %d ", this.configuration.getString(JDBC_URL), configuration.getInt("maxPoolSize", 100)));
this.sqlDialect.prepareDB(this.sqlgDataSource.get(configuration.getString(JDBC_URL)).getConnection());
} catch (Exception e) {
throw new RuntimeException(e);
}
this.sqlgTransaction = new SqlgTransaction(this, this.configuration.getBoolean("cache.vertices", false));
this.tx().readWrite();
this.schemaManager = new SchemaManager(this, sqlDialect, configuration);
this.gremlinParser = new GremlinParser(this);
if (!this.sqlDialect.supportSchemas() && !this.schemaManager.schemaExist(this.sqlDialect.getPublicSchema())) {
            //This is for MariaDB. Need to make sure a db called public exists
this.schemaManager.createSchema(this.sqlDialect.getPublicSchema());
}
this.tx().commit();
}
Configuration getConfiguration() {
return configuration;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public SchemaManager getSchemaManager() {
return schemaManager;
}
public GremlinParser getGremlinParser() {
return gremlinParser;
}
public SqlDialect getSqlDialect() {
return sqlDialect;
}
@Override
public GraphTraversalSource traversal() {
return this.traversal(SqlgGraphTraversalSource.class);
}
public GraphTraversalSource topology() {
return this.traversal().withStrategies(TopologyStrategy.build().selectFrom(SchemaManager.SQLG_SCHEMA_SCHEMA_TABLES).create());
}
@Override
public Configuration configuration() {
return this.configuration;
}
public Vertex addVertex(String label, Map<String, Object> keyValues) {
Map<Object, Object> tmp = new HashMap<>(keyValues);
tmp.put(T.label, label);
return addVertex(SqlgUtil.mapTokeyValues(tmp));
}
@Override
public Vertex addVertex(Object... keyValues) {
if (this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode("Transaction is in " + this.tx().getBatchModeType().toString() + ", use streamVertex(Object ... keyValues)");
}
if (this.tx().isInStreamingWithLockBatchMode()) {
return internalStreamVertex(keyValues);
} else {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
validateVertexKeysValues(keyValues);
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.tx().readWrite();
this.schemaManager.ensureVertexTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, false, schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
}
}
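    /*
     * Usage sketch (added for illustration): keys and values alternate and T.label
     * selects the label; the backing table is created on demand via
     * ensureVertexTableExist. User-supplied IDs are rejected, and this method may
     * not be called while the transaction is in streaming batch mode (use
     * streamVertex instead).
     *
     *   Vertex v = graph.addVertex(T.label, "Person", "name", "john");
     */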
public void streamVertex(String label) {
this.streamVertex(label, new LinkedHashMap<>());
}
public void streamVertex(Object... keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
internalStreamVertex(keyValues);
}
public void streamVertex(String label, LinkedHashMap<String, Object> keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
Map<Object, Object> tmp = new LinkedHashMap<>(keyValues);
tmp.put(T.label, label);
Object[] keyValues1 = SqlgUtil.mapTokeyValues(tmp);
streamVertex(keyValues1);
}
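    /*
     * Streaming usage sketch (added for illustration, not in the original source):
     * the transaction must already be in streaming batch mode (see SqlgTransaction
     * for enabling it), all vertices streamed before a flush or commit must share a
     * single label, and the property keys must be supplied in the same order for
     * every vertex.
     *
     *   LinkedHashMap<String, Object> props = new LinkedHashMap<>();
     *   props.put("name", "john");
     *   graph.streamVertex("Person", props);
     */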
public void streamTemporaryVertex(String label, LinkedHashMap<String, Object> keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
Map<Object, Object> tmp = new LinkedHashMap<>(keyValues);
tmp.put(T.label, label);
Object[] keyValues1 = SqlgUtil.mapTokeyValues(tmp);
streamTemporaryVertex(keyValues1);
}
public void streamTemporaryVertex(Object... keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
internalStreamTemporaryVertex(keyValues);
}
private SqlgVertex internalStreamTemporaryVertex(Object... keyValues) {
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
SchemaTable streamingBatchModeVertexSchemaTable = this.tx().getBatchManager().getStreamingBatchModeVertexSchemaTable();
if (streamingBatchModeVertexSchemaTable != null && !streamingBatchModeVertexSchemaTable.toString().equals(schemaTablePair.toString())) {
throw new IllegalStateException("Streaming batch mode must occur for one label at a time. Expected \"" + streamingBatchModeVertexSchemaTable + "\" found \"" + label + "\". First commit the transaction or call SqlgGraph.flush() before streaming a different label");
}
List<String> keys = this.tx().getBatchManager().getStreamingBatchModeVertexKeys();
validateVertexKeysValues(keyValues, keys);
this.tx().readWrite();
this.schemaManager.ensureVertexTemporaryTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, schemaTablePair.getTable(), keyValues);
}
private SqlgVertex internalStreamVertex(Object... keyValues) {
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
SchemaTable streamingBatchModeVertexSchemaTable = this.tx().getBatchManager().getStreamingBatchModeVertexSchemaTable();
if (streamingBatchModeVertexSchemaTable != null && !streamingBatchModeVertexSchemaTable.toString().equals(schemaTablePair.toString())) {
throw new IllegalStateException("Streaming batch mode must occur for one label at a time. Expected \"" + streamingBatchModeVertexSchemaTable + "\" found \"" + label + "\". First commit the transaction or call SqlgGraph.flush() before streaming a different label");
}
List<String> keys = this.tx().getBatchManager().getStreamingBatchModeVertexKeys();
validateVertexKeysValues(keyValues, keys);
this.tx().readWrite();
this.schemaManager.ensureVertexTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, true, schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
}
public void bulkAddEdges(String inVertexLabel, String outVertexLabel, String edgeLabel, Pair<String, String> idFields, List<? extends Pair<String, String>> uids) {
if (!this.tx().isInStreamingBatchMode() && !this.tx().isInStreamingWithLockBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + BatchManager.BatchModeType.STREAMING + " or " + BatchManager.BatchModeType.STREAMING_WITH_LOCK + " mode for bulkAddEdges");
}
SchemaTable inSchemaTable = SchemaTable.from(this, inVertexLabel, this.sqlDialect.getPublicSchema());
SchemaTable outSchemaTable = SchemaTable.from(this, outVertexLabel, this.sqlDialect.getPublicSchema());
this.sqlDialect.bulkAddEdges(this, inSchemaTable, outSchemaTable, edgeLabel, idFields, uids);
}
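    /*
     * Illustrative call (an added sketch; the exact semantics of idFields and of each
     * uid pair follow the dialect's bulkAddEdges implementation and are assumptions
     * here). Per the parameter names the first label is the in-vertex label and the
     * second the out-vertex label, and the method is only legal in STREAMING or
     * STREAMING_WITH_LOCK batch mode:
     *
     *   List<Pair<String, String>> uids = ...; // pairs of identifier values to join on
     *   graph.bulkAddEdges("A", "B", "ab", Pair.of("index", "index"), uids);
     */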
private void validateVertexKeysValues(Object[] keyValues) {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
int i = 0;
Object key = null;
Object value;
for (Object keyValue : keyValues) {
if (i++ % 2 == 0) {
key = keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty((String) key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
}
private void validateVertexKeysValues(Object[] keyValues, List<String> previousBatchModeKeys) {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
int i = 0;
int keyCount = 0;
Object key = null;
Object value;
for (Object keyValue : keyValues) {
if (i++ % 2 == 0) {
key = keyValue;
if (!key.equals(T.label) && previousBatchModeKeys != null && !previousBatchModeKeys.isEmpty() && !key.equals(previousBatchModeKeys.get(keyCount++))) {
throw new IllegalStateException("Streaming batch mode must occur for the same keys in the same order. Expected " + previousBatchModeKeys.get(keyCount - 1) + " found " + key);
}
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty((String) key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
}
@Override
public <C extends GraphComputer> C compute(Class<C> graphComputerClass) throws IllegalArgumentException {
throw Graph.Exceptions.graphComputerNotSupported();
}
@Override
public GraphComputer compute() {
throw Graph.Exceptions.graphComputerNotSupported();
}
@Override
public Iterator<Vertex> vertices(Object... vertexIds) {
this.tx().readWrite();
if (this.tx().getBatchManager().isStreaming()) {
throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
}
return createElementIterator(Vertex.class, vertexIds);
}
@Override
public Iterator<Edge> edges(Object... edgeIds) {
this.tx().readWrite();
if (this.tx().getBatchManager().isStreaming()) {
throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
}
return createElementIterator(Edge.class, edgeIds);
}
private <T extends Element> Iterator<T> createElementIterator(final Class<T> clazz, final Object... ids) {
if (0 == ids.length) {
return (Iterator<T>) elements(Vertex.class.isAssignableFrom(clazz), Collections.EMPTY_LIST).iterator();
} else {
if (clazz.isAssignableFrom(ids[0].getClass())) {
// based on the first item assume all vertices in the argument list
if (!Stream.of(ids).allMatch(id -> clazz.isAssignableFrom(id.getClass())))
throw Graph.Exceptions.idArgsMustBeEitherIdOrElement();
return Stream.of(ids).map(id -> (T) id).iterator();
} else {
final Class<?> firstClass = ids[0].getClass();
if (!Stream.of(ids).map(Object::getClass).allMatch(firstClass::equals))
throw Graph.Exceptions.idArgsMustBeEitherIdOrElement();
List<RecordId> recordIds = RecordId.from(ids);
Iterable<T> elementIterable = elements(Vertex.class.isAssignableFrom(clazz), recordIds);
return elementIterable.iterator();
}
}
}
public Vertex v(final Object id) {
Iterator<Vertex> t = this.vertices(id);
return t.hasNext() ? t.next() : null;
}
public Edge e(final Object id) {
Iterator<Edge> t = this.edges(id);
return t.hasNext() ? t.next() : null;
}
@Override
public SqlgTransaction tx() {
return this.sqlgTransaction;
}
@Override
public Variables variables() {
throw Graph.Exceptions.variablesNotSupported();
}
@Override
public void close() throws Exception {
if (this.tx().isOpen())
this.tx().close();
this.schemaManager.close();
this.sqlgDataSource.close(this.getJdbcUrl());
}
@Override
public <I extends Io> I io(final Io.Builder<I> builder) {
return (I) builder.graph(this).registry(new SqlgIoRegistry()).create();
}
@Override
public String toString() {
return StringFactory.graphString(this, "SqlGraph") + " (" + configuration.getProperty(JDBC_URL) + ")";
}
public ISqlGFeatures features() {
return this.features;
}
public <T> T gis() {
return this.getSqlDialect().getGis(this);
}
public interface ISqlGFeatures extends Features {
boolean supportsBatchMode();
}
public class SqlGFeatures implements ISqlGFeatures {
@Override
public GraphFeatures graph() {
return new GraphFeatures() {
@Override
public boolean supportsComputer() {
return false;
}
@Override
public VariableFeatures variables() {
return new SqlVariableFeatures();
}
@Override
public boolean supportsThreadedTransactions() {
return false;
}
};
}
@Override
public VertexFeatures vertex() {
return new SqlVertexFeatures();
}
@Override
public EdgeFeatures edge() {
return new SqlEdgeFeatures();
}
@Override
public String toString() {
return StringFactory.featureString(this);
}
@Override
public boolean supportsBatchMode() {
return getSqlDialect().supportsBatchMode();
}
public class SqlVertexFeatures implements VertexFeatures {
@Override
@FeatureDescriptor(name = FEATURE_MULTI_PROPERTIES)
public boolean supportsMultiProperties() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_META_PROPERTIES)
public boolean supportsMetaProperties() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
public VertexPropertyFeatures properties() {
return new SqlGVertexPropertyFeatures();
}
@Override
public VertexProperty.Cardinality getCardinality(final String key) {
return VertexProperty.Cardinality.single;
}
}
public class SqlEdgeFeatures implements EdgeFeatures {
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
public EdgePropertyFeatures properties() {
return new SqlEdgePropertyFeatures();
}
}
public class SqlGVertexPropertyFeatures implements VertexPropertyFeatures {
@Override
@FeatureDescriptor(name = FEATURE_ADD_PROPERTY)
public boolean supportsAddProperty() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_REMOVE_PROPERTY)
public boolean supportsRemoveProperty() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsBooleanArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsDoubleArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsIntegerArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsLongArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsStringArrayValues();
}
}
public class SqlEdgePropertyFeatures implements EdgePropertyFeatures {
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsBooleanArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsDoubleArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsIntegerArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsLongArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsStringArrayValues();
}
}
public class SqlVariableFeatures implements VariableFeatures {
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_VALUES)
public boolean supportsBooleanValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_VALUES)
public boolean supportsDoubleValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_VALUES)
public boolean supportsIntegerValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_VALUES)
public boolean supportsLongValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_VALUES)
public boolean supportsStringValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
}
}
    /**
     * Executes a SQL query and returns the result as a JSON string.
     *
     * @param query The SQL query to execute.
     * @return The query result as JSON.
     */
public String query(String query) {
try {
Connection conn = this.tx().getConnection();
ObjectNode result = this.mapper.createObjectNode();
ArrayNode dataNode = this.mapper.createArrayNode();
ArrayNode metaNode = this.mapper.createArrayNode();
Statement statement = conn.createStatement();
if (logger.isDebugEnabled()) {
logger.debug(query);
}
ResultSet rs = statement.executeQuery(query);
ResultSetMetaData rsmd = rs.getMetaData();
boolean first = true;
while (rs.next()) {
int numColumns = rsmd.getColumnCount();
ObjectNode obj = this.mapper.createObjectNode();
for (int i = 1; i < numColumns + 1; i++) {
String columnName = rsmd.getColumnLabel(i);
Object o = rs.getObject(columnName);
int type = rsmd.getColumnType(i);
this.sqlDialect.putJsonObject(obj, columnName, type, o);
if (first) {
this.sqlDialect.putJsonMetaObject(this.mapper, metaNode, columnName, type, o);
}
}
first = false;
dataNode.add(obj);
}
result.put("data", dataNode);
result.put("meta", metaNode);
return result.toString();
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
this.tx().rollback();
}
}
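    // Usage sketch for query(): a hedged example assuming an open graph and an existing vertex
    // table (the "V_Person" name below is illustrative only):
    //
    //   String json = sqlgGraph.query("SELECT * FROM \"public\".\"V_Person\"");
    //   // json has the shape {"data":[...],"meta":[...]} as assembled above.
    //
    // Note that query() rolls back in its finally block, so it is strictly read-only.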
//indexing
public void createUniqueConstraint(String label, String propertyKey) {
throw new IllegalStateException("Not yet implemented!");
// this.tx().readWrite();
}
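    // The labeled-index methods below walk the varargs as alternating key/value pairs (the values
    // are dummies used only for validation) before delegating to the SchemaManager.
    // Hedged usage sketch; the label and key are illustrative:
    //
    //   sqlgGraph.createVertexLabeledIndex("Person", "name", "aDummyValue");
    //   sqlgGraph.tx().commit();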
public void createVertexLabeledIndex(String label, Object... dummykeyValues) {
int i = 0;
String key = "";
Object value;
for (Object keyValue : dummykeyValues) {
if (i++ % 2 == 0) {
key = (String) keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty(key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
this.tx().readWrite();
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.getSchemaManager().createVertexIndex(schemaTablePair, dummykeyValues);
}
public void createEdgeLabeledIndex(String label, Object... dummykeyValues) {
int i = 0;
String key = "";
Object value;
for (Object keyValue : dummykeyValues) {
if (i++ % 2 == 0) {
key = (String) keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty(key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
this.tx().readWrite();
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.getSchemaManager().createEdgeIndex(schemaTablePair, dummykeyValues);
}
public long countVertices() {
this.tx().readWrite();
return countElements(true);
}
public long countEdges() {
this.tx().readWrite();
return countElements(false);
}
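    // Counts elements by issuing SELECT COUNT(1) against every vertex (or edge) table known to
    // the SchemaManager and summing the results; no cached statistics are used.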
private long countElements(boolean returnVertices) {
long count = 0;
Set<String> tables = this.getSchemaManager().getAllTables().keySet();
for (String table : tables) {
SchemaTable schemaTable = SchemaTable.from(this, table, this.getSqlDialect().getPublicSchema());
if (returnVertices ? schemaTable.isVertexTable() : !schemaTable.isVertexTable()) {
StringBuilder sql = new StringBuilder("SELECT COUNT(1) FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
sql.append(schemaTable.getTable());
sql.append("\"");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
ResultSet rs = preparedStatement.executeQuery();
rs.next();
count += rs.getLong(1);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
return count;
}
boolean isImplementForeignKeys() {
return implementForeignKeys;
}
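    // Dialect discovery: probe the classpath for the Postgres, MariaDB and HSQLDB dialect
    // classes in that order and use the first one found; finding none is a configuration error.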
private Class<?> findSqlgDialect() {
try {
return Class.forName("org.umlg.sqlg.sql.dialect.PostgresDialect");
} catch (ClassNotFoundException e) {
}
try {
return Class.forName("org.umlg.sqlg.sql.dialect.MariaDbDialect");
} catch (ClassNotFoundException e) {
}
try {
return Class.forName("org.umlg.sqlg.sql.dialect.HsqldbDialect");
} catch (ClassNotFoundException e) {
}
throw new IllegalStateException("No sqlg dialect found!");
}
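    // Fetches vertices or edges. With explicit ids, the ids are grouped per schema/table and
    // selected via an IN clause; with no ids, every vertex (or edge) table is scanned in full.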
private <T extends Element> Iterable<T> elements(boolean returnVertices, final List<RecordId> elementIds) {
List<T> sqlgElements = new ArrayList<>();
if (elementIds.size() > 0) {
Map<SchemaTable, List<Long>> distinctTableIdMap = RecordId.normalizeIds(elementIds);
for (Map.Entry<SchemaTable, List<Long>> schemaTableListEntry : distinctTableIdMap.entrySet()) {
SchemaTable schemaTable = schemaTableListEntry.getKey();
String tableName = (returnVertices ? SchemaManager.VERTEX_PREFIX : SchemaManager.EDGE_PREFIX) + schemaTable.getTable();
if (this.getSchemaManager().getAllTables().containsKey(schemaTable.getSchema() + "." + tableName)) {
List<Long> schemaTableIds = schemaTableListEntry.getValue();
StringBuilder sql = new StringBuilder("SELECT * FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
if (returnVertices) {
sql.append(SchemaManager.VERTEX_PREFIX);
} else {
sql.append(SchemaManager.EDGE_PREFIX);
}
sql.append(schemaTable.getTable());
sql.append("\" WHERE ");
sql.append(this.sqlDialect.maybeWrapInQoutes("ID"));
sql.append(" IN (");
int count = 1;
for (Long id : schemaTableIds) {
sql.append(id.toString());
if (count++ < schemaTableIds.size()) {
sql.append(",");
}
}
sql.append(")");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (Statement statement = conn.createStatement()) {
statement.execute(sql.toString());
ResultSet resultSet = statement.getResultSet();
while (resultSet.next()) {
long id = resultSet.getLong("ID");
SqlgElement sqlgElement;
if (returnVertices) {
sqlgElement = SqlgVertex.of(this, id, schemaTable.getSchema(), schemaTable.getTable());
} else {
sqlgElement = new SqlgEdge(this, id, schemaTable.getSchema(), schemaTable.getTable());
}
sqlgElement.loadResultSet(resultSet);
sqlgElements.add((T) sqlgElement);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
} else {
//TODO use a union query
Set<String> tables = this.getSchemaManager().getAllTables().keySet();
for (String table : tables) {
SchemaTable schemaTable = SchemaTable.from(this, table, this.getSqlDialect().getPublicSchema());
if (returnVertices ? schemaTable.isVertexTable() : !schemaTable.isVertexTable()) {
StringBuilder sql = new StringBuilder("SELECT * FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
sql.append(schemaTable.getTable());
sql.append("\"");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (Statement statement = conn.createStatement()) {
statement.execute(sql.toString());
ResultSet resultSet = statement.getResultSet();
while (resultSet.next()) {
long id = resultSet.getLong("ID");
SqlgElement sqlgElement;
if (returnVertices) {
sqlgElement = SqlgVertex.of(this, id, schemaTable.getSchema(), schemaTable.getTable().substring(SchemaManager.VERTEX_PREFIX.length()));
} else {
sqlgElement = new SqlgEdge(this, id, schemaTable.getSchema(), schemaTable.getTable().substring(SchemaManager.EDGE_PREFIX.length()));
}
sqlgElement.loadResultSet(resultSet);
sqlgElements.add((T) sqlgElement);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
}
return sqlgElements;
}
public SqlgDataSource getSqlgDataSource() {
return sqlgDataSource;
}
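    // drop() is destructive: it removes every user table and every non-default, non-GIS schema
    // reported by the JDBC metadata, so it is only suitable for resetting a database (e.g. in tests).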
public void drop() {
Connection conn = this.tx().getConnection();
try {
DatabaseMetaData metadata = conn.getMetaData();
String catalog = null;
String schemaPattern = null;
String tableNamePattern = "%";
String[] types = {"TABLE"};
ResultSet result = metadata.getTables(catalog, schemaPattern, tableNamePattern, types);
while (result.next()) {
String schema = result.getString(2);
String table = result.getString(3);
if (sqlDialect.getGisSchemas().contains(schema) || sqlDialect.getSpacialRefTable().contains(table)) {
continue;
}
StringBuilder sql = new StringBuilder("DROP TABLE ");
sql.append(sqlDialect.maybeWrapInQoutes(schema));
sql.append(".");
sql.append(sqlDialect.maybeWrapInQoutes(table));
sql.append(" CASCADE");
if (sqlDialect.needsSemicolon()) {
sql.append(";");
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
preparedStatement.executeUpdate();
}
}
catalog = null;
schemaPattern = null;
result = metadata.getSchemas(catalog, schemaPattern);
while (result.next()) {
String schema = result.getString(1);
if (!sqlDialect.getDefaultSchemas().contains(schema) && !sqlDialect.getGisSchemas().contains(schema)) {
StringBuilder sql = new StringBuilder("DROP SCHEMA ");
sql.append(sqlDialect.maybeWrapInQoutes(schema));
sql.append(" CASCADE");
if (sqlDialect.needsSemicolon()) {
sql.append(";");
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
preparedStatement.executeUpdate();
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
| sqlg-core/src/main/java/org/umlg/sqlg/structure/SqlgGraph.java | package org.umlg.sqlg.structure;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.tinkerpop.gremlin.process.computer.GraphComputer;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.*;
import org.apache.tinkerpop.gremlin.structure.io.Io;
import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
import org.apache.tinkerpop.gremlin.structure.util.FeatureDescriptor;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.umlg.sqlg.sql.dialect.SqlDialect;
import org.umlg.sqlg.sql.parse.GremlinParser;
import org.umlg.sqlg.strategy.SqlgGraphStepStrategy;
import org.umlg.sqlg.strategy.SqlgVertexStepStrategy;
import org.umlg.sqlg.strategy.TopologyStrategy;
import org.umlg.sqlg.util.SqlgUtil;
import java.lang.reflect.Constructor;
import java.sql.*;
import java.util.*;
import java.util.stream.Stream;
/**
* Date: 2014/07/12
* Time: 5:38 AM
*/
@Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_PERFORMANCE)
@Graph.OptIn(Graph.OptIn.SUITE_PROCESS_PERFORMANCE)
@Graph.OptIn(Graph.OptIn.SUITE_STRUCTURE_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_PROCESS_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_PROCESS_STANDARD)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_INTEGRATE)
@Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_PERFORMANCE)
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.ExplainTest$Traversals",
method = "g_V_outE_identity_inV_explain",
reason = "Assertions assume that the strategies are in a particular order.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.filter.HasTest$Traversals",
method = "g_V_hasId_compilationEquality",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "modern_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "grateful_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "g_V_repeat_both_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "grateful_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "g_V_repeat_both_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "modern_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "testProfileStrategyCallback",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.ProfileTest$Traversals",
method = "testProfileStrategyCallbackSideEffect",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "modern_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "grateful_V_out_out_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "g_V_repeat_both_profileXmetricsX",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "grateful_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "g_V_repeat_both_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "modern_V_out_out_profile",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "testProfileStrategyCallback",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyProfileTest$Traversals",
method = "testProfileStrategyCallbackSideEffect",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.SerializationTest$GraphSONTest",
method = "shouldSerializeTraversalMetrics",
reason = "Assertions are TinkerGraph specific.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX3X_count",
reason = "Takes too long, and too much memory at present.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_both_both_count",
reason = "Travis times out.")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX5X_asXaX_outXwrittenByX_asXbX_selectXa_bX_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.CountTest$Traversals",
method = "g_V_repeatXoutX_timesX8X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX3X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX8X_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.map.GroovyCountTest$Traversals",
method = "g_V_repeatXoutX_timesX5X_asXaX_outXwrittenByX_asXbX_selectXa_bX_count",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatTest$Traversals",
method = "g_V_repeatXbothX_timesX10X_asXaX_out_asXbX_selectXa_bX",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutorPerformanceTest",
method = "executorEval",
reason = "Takes too long")
@Graph.OptOut(
test = "org.apache.tinkerpop.gremlin.structure.GraphTest",
method = "shouldHaveStandardStringRepresentation",
reason = "SQLGGRAPH INCLUDES THE JDBC CONNECTION URL.")
public class SqlgGraph implements Graph {
public static final String JDBC_URL = "jdbc.url";
public static final String MODE_FOR_STREAM_VERTEX = " mode for streamVertex";
public static final String TRANSACTION_MUST_BE_IN = "Transaction must be in ";
private final SqlgDataSource sqlgDataSource;
private Logger logger = LoggerFactory.getLogger(SqlgGraph.class.getName());
private final SqlgTransaction sqlgTransaction;
private SchemaManager schemaManager;
private GremlinParser gremlinParser;
private SqlDialect sqlDialect;
private String jdbcUrl;
private ObjectMapper mapper = new ObjectMapper();
private boolean implementForeignKeys;
private Configuration configuration = new BaseConfiguration();
private final ISqlGFeatures features = new SqlGFeatures();
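    // Register Sqlg's traversal strategies (vertex step, graph step and topology) once, globally,
    // for all Graph instances so that traversals can be optimised into SQL where possible.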
static {
TraversalStrategies.GlobalCache.registerStrategies(Graph.class, TraversalStrategies.GlobalCache.getStrategies(Graph.class)
.addStrategies(new SqlgVertexStepStrategy())
.addStrategies(new SqlgGraphStepStrategy())
.addStrategies(TopologyStrategy.build().create()));
}
public static <G extends Graph> G open(final Configuration configuration) {
if (null == configuration) throw Graph.Exceptions.argumentCanNotBeNull("configuration");
if (!configuration.containsKey(JDBC_URL))
throw new IllegalArgumentException(String.format("SqlgGraph configuration requires that the %s be set", JDBC_URL));
SqlgGraph sqlgGraph = new SqlgGraph(configuration);
sqlgGraph.schemaManager.loadSchema();
return (G) sqlgGraph;
}
public static <G extends Graph> G open(final String pathToSqlgProperties) {
if (null == pathToSqlgProperties) throw Graph.Exceptions.argumentCanNotBeNull("pathToSqlgProperties");
Configuration configuration;
try {
configuration = new PropertiesConfiguration(pathToSqlgProperties);
} catch (ConfigurationException e) {
throw new RuntimeException(e);
}
return open(configuration);
}
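    // Minimal configuration sketch. Only the jdbc.url key is referenced by this class directly;
    // the username/password keys below are assumptions about the data source setup:
    //
    //   Configuration conf = new BaseConfiguration();
    //   conf.addProperty("jdbc.url", "jdbc:postgresql://localhost:5432/mydb");
    //   conf.addProperty("jdbc.username", "postgres");   // assumed key
    //   conf.addProperty("jdbc.password", "postgres");   // assumed key
    //   SqlgGraph graph = SqlgGraph.open(conf);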
private SqlgGraph(final Configuration configuration) {
try {
Class<?> sqlDialectClass = findSqlgDialect();
logger.debug(String.format("Initializing Sqlg with %s dialect", sqlDialectClass.getSimpleName()));
Constructor<?> constructor = sqlDialectClass.getConstructor(Configuration.class);
this.sqlDialect = (SqlDialect) constructor.newInstance(configuration);
this.implementForeignKeys = configuration.getBoolean("implement.foreign.keys", true);
this.configuration = configuration;
} catch (Exception e) {
throw new RuntimeException(e);
}
try {
this.jdbcUrl = this.configuration.getString(JDBC_URL);
this.sqlgDataSource = SqlgDataSource.setupDataSource(
sqlDialect.getJdbcDriver(),
this.configuration
);
logger.info(String.format("Connection url = %s , maxPoolSize = %d ", this.configuration.getString(JDBC_URL), configuration.getInt("maxPoolSize", 100)));
this.sqlDialect.prepareDB(this.sqlgDataSource.get(configuration.getString(JDBC_URL)).getConnection());
} catch (Exception e) {
throw new RuntimeException(e);
}
this.sqlgTransaction = new SqlgTransaction(this, this.configuration.getBoolean("cache.vertices", false));
this.tx().readWrite();
this.schemaManager = new SchemaManager(this, sqlDialect, configuration);
this.gremlinParser = new GremlinParser(this);
if (!this.sqlDialect.supportSchemas() && !this.schemaManager.schemaExist(this.sqlDialect.getPublicSchema())) {
            // This is for MariaDB: it does not support schemas, so make sure a database called public exists.
this.schemaManager.createSchema(this.sqlDialect.getPublicSchema());
}
this.tx().commit();
}
Configuration getConfiguration() {
return configuration;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public SchemaManager getSchemaManager() {
return schemaManager;
}
public GremlinParser getGremlinParser() {
return gremlinParser;
}
public SqlDialect getSqlDialect() {
return sqlDialect;
}
@Override
public GraphTraversalSource traversal() {
return this.traversal(SqlgGraphTraversalSource.class);
}
public GraphTraversalSource topology() {
return this.traversal().withStrategies(TopologyStrategy.build().selectFrom(SchemaManager.SQLG_SCHEMA_SCHEMA_TABLES).create());
}
@Override
public Configuration configuration() {
return this.configuration;
}
public Vertex addVertex(String label, Map<String, Object> keyValues) {
Map<Object, Object> tmp = new HashMap<>(keyValues);
tmp.put(T.label, label);
return addVertex(SqlgUtil.mapTokeyValues(tmp));
}
@Override
public Vertex addVertex(Object... keyValues) {
if (this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode("Transaction is in " + this.tx().getBatchModeType().toString() + ", use streamVertex(Object ... keyValues)");
}
if (this.tx().isInStreamingWithLockBatchMode()) {
return internalStreamVertex(keyValues);
} else {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
validateVertexKeysValues(keyValues);
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.tx().readWrite();
this.schemaManager.ensureVertexTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, false, schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
}
}
public void streamVertex(String label) {
this.streamVertex(label, new LinkedHashMap<>());
}
public void streamVertex(Object... keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
internalStreamVertex(keyValues);
}
public void streamVertex(String label, LinkedHashMap<String, Object> keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
Map<Object, Object> tmp = new LinkedHashMap<>(keyValues);
tmp.put(T.label, label);
Object[] keyValues1 = SqlgUtil.mapTokeyValues(tmp);
streamVertex(keyValues1);
}
public void streamTemporaryVertex(String label, LinkedHashMap<String, Object> keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
Map<Object, Object> tmp = new LinkedHashMap<>(keyValues);
tmp.put(T.label, label);
Object[] keyValues1 = SqlgUtil.mapTokeyValues(tmp);
streamTemporaryVertex(keyValues1);
}
public void streamTemporaryVertex(Object... keyValues) {
if (!this.tx().isInStreamingBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + this.tx().getBatchModeType().toString() + MODE_FOR_STREAM_VERTEX);
}
internalStreamTemporaryVertex(keyValues);
}
private SqlgVertex internalStreamTemporaryVertex(Object... keyValues) {
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
SchemaTable streamingBatchModeVertexSchemaTable = this.tx().getBatchManager().getStreamingBatchModeVertexSchemaTable();
if (streamingBatchModeVertexSchemaTable != null && !streamingBatchModeVertexSchemaTable.toString().equals(schemaTablePair.toString())) {
throw new IllegalStateException("Streaming batch mode must occur for one label at a time. Expected \"" + streamingBatchModeVertexSchemaTable + "\" found \"" + label + "\". First commit the transaction or call SqlgGraph.flush() before streaming a different label");
}
List<String> keys = this.tx().getBatchManager().getStreamingBatchModeVertexKeys();
validateVertexKeysValues(keyValues, keys);
this.tx().readWrite();
this.schemaManager.ensureVertexTemporaryTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, schemaTablePair.getTable(), keyValues);
}
private SqlgVertex internalStreamVertex(Object... keyValues) {
final String label = ElementHelper.getLabelValue(keyValues).orElse(Vertex.DEFAULT_LABEL);
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
SchemaTable streamingBatchModeVertexSchemaTable = this.tx().getBatchManager().getStreamingBatchModeVertexSchemaTable();
if (streamingBatchModeVertexSchemaTable != null && !streamingBatchModeVertexSchemaTable.toString().equals(schemaTablePair.toString())) {
throw new IllegalStateException("Streaming batch mode must occur for one label at a time. Expected \"" + streamingBatchModeVertexSchemaTable + "\" found \"" + label + "\". First commit the transaction or call SqlgGraph.flush() before streaming a different label");
}
List<String> keys = this.tx().getBatchManager().getStreamingBatchModeVertexKeys();
validateVertexKeysValues(keyValues, keys);
this.tx().readWrite();
this.schemaManager.ensureVertexTableExist(schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
return new SqlgVertex(this, true, schemaTablePair.getSchema(), schemaTablePair.getTable(), keyValues);
}
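    // Bulk edge creation between two already-loaded vertex labels, joining on the supplied id
    // fields; the guard below restricts it to STREAMING or STREAMING_WITH_LOCK batch mode.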
public void bulkAddEdges(String inVertexLabel, String outVertexLabel, String edgeLabel, Pair<String, String> idFields, List<? extends Pair<String, String>> uids) {
if (!this.tx().isInStreamingBatchMode() && !this.tx().isInStreamingWithLockBatchMode()) {
throw SqlgExceptions.invalidMode(TRANSACTION_MUST_BE_IN + BatchManager.BatchModeType.STREAMING + " or " + BatchManager.BatchModeType.STREAMING_WITH_LOCK + " mode for bulkAddEdges");
}
SchemaTable inSchemaTable = SchemaTable.from(this, inVertexLabel, this.sqlDialect.getPublicSchema());
SchemaTable outSchemaTable = SchemaTable.from(this, outVertexLabel, this.sqlDialect.getPublicSchema());
this.sqlDialect.bulkAddEdges(this, inSchemaTable, outSchemaTable, edgeLabel, idFields, uids);
}
private void validateVertexKeysValues(Object[] keyValues) {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
int i = 0;
Object key = null;
Object value;
for (Object keyValue : keyValues) {
if (i++ % 2 == 0) {
key = keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty((String) key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
}
private void validateVertexKeysValues(Object[] keyValues, List<String> previousBatchModeKeys) {
ElementHelper.legalPropertyKeyValueArray(keyValues);
if (ElementHelper.getIdValue(keyValues).isPresent())
throw Vertex.Exceptions.userSuppliedIdsNotSupported();
int i = 0;
int keyCount = 0;
Object key = null;
Object value;
for (Object keyValue : keyValues) {
if (i++ % 2 == 0) {
key = keyValue;
if (!key.equals(T.label) && previousBatchModeKeys != null && !previousBatchModeKeys.isEmpty() && !key.equals(previousBatchModeKeys.get(keyCount++))) {
throw new IllegalStateException("Streaming batch mode must occur for the same keys in the same order. Expected " + previousBatchModeKeys.get(keyCount - 1) + " found " + key);
}
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty((String) key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
}
@Override
public <C extends GraphComputer> C compute(Class<C> graphComputerClass) throws IllegalArgumentException {
throw Graph.Exceptions.graphComputerNotSupported();
}
@Override
public GraphComputer compute() {
throw Graph.Exceptions.graphComputerNotSupported();
}
@Override
public Iterator<Vertex> vertices(Object... vertexIds) {
this.tx().readWrite();
if (this.tx().getBatchManager().isStreaming()) {
throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
}
return createElementIterator(Vertex.class, vertexIds);
}
@Override
public Iterator<Edge> edges(Object... edgeIds) {
this.tx().readWrite();
if (this.tx().getBatchManager().isStreaming()) {
throw new IllegalStateException("streaming is in progress, first flush or commit before querying.");
}
return createElementIterator(Edge.class, edgeIds);
}
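    // If the supplied ids are already Elements they are returned as-is (all arguments must then
    // be Elements); otherwise they are converted to RecordIds and looked up in the database.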
private <T extends Element> Iterator<T> createElementIterator(final Class<T> clazz, final Object... ids) {
if (0 == ids.length) {
return (Iterator<T>) elements(Vertex.class.isAssignableFrom(clazz), Collections.EMPTY_LIST).iterator();
} else {
if (clazz.isAssignableFrom(ids[0].getClass())) {
// based on the first item assume all vertices in the argument list
if (!Stream.of(ids).allMatch(id -> clazz.isAssignableFrom(id.getClass())))
throw Graph.Exceptions.idArgsMustBeEitherIdOrElement();
return Stream.of(ids).map(id -> (T) id).iterator();
} else {
final Class<?> firstClass = ids[0].getClass();
if (!Stream.of(ids).map(Object::getClass).allMatch(firstClass::equals))
throw Graph.Exceptions.idArgsMustBeEitherIdOrElement();
List<RecordId> recordIds = RecordId.from(ids);
Iterable<T> elementIterable = elements(Vertex.class.isAssignableFrom(clazz), recordIds);
return elementIterable.iterator();
}
}
}
public Vertex v(final Object id) {
Iterator<Vertex> t = this.vertices(id);
return t.hasNext() ? t.next() : null;
}
public Edge e(final Object id) {
Iterator<Edge> t = this.edges(id);
return t.hasNext() ? t.next() : null;
}
@Override
public SqlgTransaction tx() {
return this.sqlgTransaction;
}
@Override
public Variables variables() {
throw Graph.Exceptions.variablesNotSupported();
}
@Override
public void close() throws Exception {
if (this.tx().isOpen())
this.tx().close();
this.schemaManager.close();
this.sqlgDataSource.close(this.getJdbcUrl());
}
@Override
public <I extends Io> I io(final Io.Builder<I> builder) {
return (I) builder.graph(this).registry(new SqlgIoRegistry()).create();
}
@Override
public String toString() {
return StringFactory.graphString(this, "SqlGraph") + " (" + configuration.getProperty(JDBC_URL) + ")";
}
public ISqlGFeatures features() {
return this.features;
}
public <T> T gis() {
return this.getSqlDialect().getGis(this);
}
public interface ISqlGFeatures extends Features {
boolean supportsBatchMode();
}
public class SqlGFeatures implements ISqlGFeatures {
@Override
public GraphFeatures graph() {
return new GraphFeatures() {
@Override
public boolean supportsComputer() {
return false;
}
@Override
public VariableFeatures variables() {
return new SqlVariableFeatures();
}
@Override
public boolean supportsThreadedTransactions() {
return false;
}
};
}
@Override
public VertexFeatures vertex() {
return new SqlVertexFeatures();
}
@Override
public EdgeFeatures edge() {
return new SqlEdgeFeatures();
}
@Override
public String toString() {
return StringFactory.featureString(this);
}
@Override
public boolean supportsBatchMode() {
return getSqlDialect().supportsBatchMode();
}
public class SqlVertexFeatures implements VertexFeatures {
@Override
@FeatureDescriptor(name = FEATURE_MULTI_PROPERTIES)
public boolean supportsMultiProperties() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_META_PROPERTIES)
public boolean supportsMetaProperties() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
public VertexPropertyFeatures properties() {
return new SqlGVertexPropertyFeatures();
}
@Override
public VertexProperty.Cardinality getCardinality(final String key) {
return VertexProperty.Cardinality.single;
}
}
public class SqlEdgeFeatures implements EdgeFeatures {
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
public EdgePropertyFeatures properties() {
return new SqlEdgePropertyFeatures();
}
}
public class SqlGVertexPropertyFeatures implements VertexPropertyFeatures {
@Override
@FeatureDescriptor(name = FEATURE_ADD_PROPERTY)
public boolean supportsAddProperty() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_REMOVE_PROPERTY)
public boolean supportsRemoveProperty() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_USER_SUPPLIED_IDS)
public boolean supportsUserSuppliedIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_NUMERIC_IDS)
public boolean supportsNumericIds() {
return true;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_IDS)
public boolean supportsStringIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UUID_IDS)
public boolean supportsUuidIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_CUSTOM_IDS)
public boolean supportsCustomIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_ANY_IDS)
public boolean supportsAnyIds() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsBooleanArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsDoubleArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsIntegerArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsLongArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsStringArrayValues();
}
}
public class SqlEdgePropertyFeatures implements EdgePropertyFeatures {
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsBooleanArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsByteArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsDoubleArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsFloatArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsIntegerArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsLongArrayValues();
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return SqlgGraph.this.getSchemaManager().getSqlDialect().supportsStringArrayValues();
}
}
public class SqlVariableFeatures implements VariableFeatures {
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_VALUES)
public boolean supportsBooleanValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_VALUES)
public boolean supportsDoubleValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_VALUES)
public boolean supportsFloatValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_VALUES)
public boolean supportsIntegerValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_VALUES)
public boolean supportsLongValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MAP_VALUES)
public boolean supportsMapValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_MIXED_LIST_VALUES)
public boolean supportsMixedListValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_VALUES)
public boolean supportsByteValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BOOLEAN_ARRAY_VALUES)
public boolean supportsBooleanArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_BYTE_ARRAY_VALUES)
public boolean supportsByteArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_DOUBLE_ARRAY_VALUES)
public boolean supportsDoubleArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_FLOAT_ARRAY_VALUES)
public boolean supportsFloatArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_INTEGER_ARRAY_VALUES)
public boolean supportsIntegerArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_LONG_ARRAY_VALUES)
public boolean supportsLongArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_ARRAY_VALUES)
public boolean supportsStringArrayValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_SERIALIZABLE_VALUES)
public boolean supportsSerializableValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_STRING_VALUES)
public boolean supportsStringValues() {
return false;
}
@Override
@FeatureDescriptor(name = FEATURE_UNIFORM_LIST_VALUES)
public boolean supportsUniformListValues() {
return false;
}
}
}
    /**
     * Executes a SQL query and returns the result as a JSON string.
     *
     * @param query The SQL query to execute.
     * @return The query result as JSON.
     */
public String query(String query) {
try {
Connection conn = this.tx().getConnection();
ObjectNode result = this.mapper.createObjectNode();
ArrayNode dataNode = this.mapper.createArrayNode();
ArrayNode metaNode = this.mapper.createArrayNode();
Statement statement = conn.createStatement();
if (logger.isDebugEnabled()) {
logger.debug(query);
}
ResultSet rs = statement.executeQuery(query);
ResultSetMetaData rsmd = rs.getMetaData();
boolean first = true;
while (rs.next()) {
int numColumns = rsmd.getColumnCount();
ObjectNode obj = this.mapper.createObjectNode();
for (int i = 1; i < numColumns + 1; i++) {
String columnName = rsmd.getColumnLabel(i);
Object o = rs.getObject(columnName);
int type = rsmd.getColumnType(i);
this.sqlDialect.putJsonObject(obj, columnName, type, o);
if (first) {
this.sqlDialect.putJsonMetaObject(this.mapper, metaNode, columnName, type, o);
}
}
first = false;
dataNode.add(obj);
}
result.put("data", dataNode);
result.put("meta", metaNode);
return result.toString();
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
this.tx().rollback();
}
}
//indexing
public void createUniqueConstraint(String label, String propertyKey) {
throw new IllegalStateException("Not yet implemented!");
// this.tx().readWrite();
}
public void createVertexLabeledIndex(String label, Object... dummykeyValues) {
int i = 0;
String key = "";
Object value;
for (Object keyValue : dummykeyValues) {
if (i++ % 2 == 0) {
key = (String) keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty(key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
this.tx().readWrite();
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.getSchemaManager().createVertexIndex(schemaTablePair, dummykeyValues);
}
public void createEdgeLabeledIndex(String label, Object... dummykeyValues) {
int i = 0;
String key = "";
Object value;
for (Object keyValue : dummykeyValues) {
if (i++ % 2 == 0) {
key = (String) keyValue;
} else {
value = keyValue;
if (!key.equals(T.label)) {
ElementHelper.validateProperty(key, value);
this.sqlDialect.validateProperty(key, value);
}
}
}
this.tx().readWrite();
SchemaTable schemaTablePair = SchemaTable.from(this, label, this.getSqlDialect().getPublicSchema());
this.getSchemaManager().createEdgeIndex(schemaTablePair, dummykeyValues);
}
public long countVertices() {
this.tx().readWrite();
return countElements(true);
}
public long countEdges() {
this.tx().readWrite();
return countElements(false);
}
private long countElements(boolean returnVertices) {
long count = 0;
Set<String> tables = this.getSchemaManager().getAllTables().keySet();
for (String table : tables) {
SchemaTable schemaTable = SchemaTable.from(this, table, this.getSqlDialect().getPublicSchema());
if (returnVertices ? schemaTable.isVertexTable() : !schemaTable.isVertexTable()) {
StringBuilder sql = new StringBuilder("SELECT COUNT(1) FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
sql.append(schemaTable.getTable());
sql.append("\"");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
ResultSet rs = preparedStatement.executeQuery();
rs.next();
count += rs.getLong(1);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
return count;
}
boolean isImplementForeignKeys() {
return implementForeignKeys;
}
private Class<?> findSqlgDialect() {
try {
return Class.forName("org.umlg.sqlg.sql.dialect.PostgresDialect");
} catch (ClassNotFoundException e) {
}
try {
return Class.forName("org.umlg.sqlg.sql.dialect.MariaDbDialect");
} catch (ClassNotFoundException e) {
}
try {
return Class.forName("org.umlg.sqlg.sql.dialect.HsqldbDialect");
} catch (ClassNotFoundException e) {
}
throw new IllegalStateException("No sqlg dialect found!");
}
private <T extends Element> Iterable<T> elements(boolean returnVertices, final List<RecordId> elementIds) {
List<T> sqlgElements = new ArrayList<>();
if (elementIds.size() > 0) {
Map<SchemaTable, List<Long>> distinctTableIdMap = RecordId.normalizeIds(elementIds);
for (Map.Entry<SchemaTable, List<Long>> schemaTableListEntry : distinctTableIdMap.entrySet()) {
SchemaTable schemaTable = schemaTableListEntry.getKey();
String tableName = (returnVertices ? SchemaManager.VERTEX_PREFIX : SchemaManager.EDGE_PREFIX) + schemaTable.getTable();
if (this.getSchemaManager().getAllTables().containsKey(schemaTable.getSchema() + "." + tableName)) {
List<Long> schemaTableIds = schemaTableListEntry.getValue();
StringBuilder sql = new StringBuilder("SELECT * FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
if (returnVertices) {
sql.append(SchemaManager.VERTEX_PREFIX);
} else {
sql.append(SchemaManager.EDGE_PREFIX);
}
sql.append(schemaTable.getTable());
sql.append("\" WHERE ");
sql.append(this.sqlDialect.maybeWrapInQoutes("ID"));
sql.append(" IN (");
int count = 1;
for (Long id : schemaTableIds) {
sql.append(id.toString());
if (count++ < schemaTableIds.size()) {
sql.append(",");
}
}
sql.append(")");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (Statement statement = conn.createStatement()) {
statement.execute(sql.toString());
ResultSet resultSet = statement.getResultSet();
while (resultSet.next()) {
long id = resultSet.getLong("ID");
SqlgElement sqlgElement;
if (returnVertices) {
sqlgElement = SqlgVertex.of(this, id, schemaTable.getSchema(), schemaTable.getTable());
} else {
sqlgElement = new SqlgEdge(this, id, schemaTable.getSchema(), schemaTable.getTable());
}
sqlgElement.loadResultSet(resultSet);
sqlgElements.add((T) sqlgElement);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
} else {
//TODO use a union query
Set<String> tables = this.getSchemaManager().getAllTables().keySet();
for (String table : tables) {
SchemaTable schemaTable = SchemaTable.from(this, table, this.getSqlDialect().getPublicSchema());
if (returnVertices ? schemaTable.isVertexTable() : !schemaTable.isVertexTable()) {
StringBuilder sql = new StringBuilder("SELECT * FROM ");
sql.append("\"");
sql.append(schemaTable.getSchema());
sql.append("\".\"");
sql.append(schemaTable.getTable());
sql.append("\"");
if (this.getSqlDialect().needsSemicolon()) {
sql.append(";");
}
Connection conn = this.tx().getConnection();
if (logger.isDebugEnabled()) {
logger.debug(sql.toString());
}
try (Statement statement = conn.createStatement()) {
statement.execute(sql.toString());
ResultSet resultSet = statement.getResultSet();
while (resultSet.next()) {
long id = resultSet.getLong("ID");
SqlgElement sqlgElement;
if (returnVertices) {
sqlgElement = SqlgVertex.of(this, id, schemaTable.getSchema(), schemaTable.getTable().substring(SchemaManager.VERTEX_PREFIX.length()));
} else {
sqlgElement = new SqlgEdge(this, id, schemaTable.getSchema(), schemaTable.getTable().substring(SchemaManager.EDGE_PREFIX.length()));
}
sqlgElement.loadResultSet(resultSet);
sqlgElements.add((T) sqlgElement);
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
}
return sqlgElements;
}
public SqlgDataSource getSqlgDataSource() {
return sqlgDataSource;
}
public void drop() {
Connection conn = this.tx().getConnection();
try {
DatabaseMetaData metadata = conn.getMetaData();
String catalog = null;
String schemaPattern = null;
String tableNamePattern = "%";
String[] types = {"TABLE"};
ResultSet result = metadata.getTables(catalog, schemaPattern, tableNamePattern, types);
while (result.next()) {
String schema = result.getString(2);
String table = result.getString(3);
if (sqlDialect.getGisSchemas().contains(schema) || sqlDialect.getSpacialRefTable().contains(table)) {
continue;
}
StringBuilder sql = new StringBuilder("DROP TABLE ");
sql.append(sqlDialect.maybeWrapInQoutes(schema));
sql.append(".");
sql.append(sqlDialect.maybeWrapInQoutes(table));
sql.append(" CASCADE");
if (sqlDialect.needsSemicolon()) {
sql.append(";");
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
preparedStatement.executeUpdate();
}
}
catalog = null;
schemaPattern = null;
result = metadata.getSchemas(catalog, schemaPattern);
while (result.next()) {
String schema = result.getString(1);
if (!sqlDialect.getDefaultSchemas().contains(schema) && !sqlDialect.getGisSchemas().contains(schema)) {
StringBuilder sql = new StringBuilder("DROP SCHEMA ");
sql.append(sqlDialect.maybeWrapInQoutes(schema));
sql.append(" CASCADE");
if (sqlDialect.needsSemicolon()) {
sql.append(";");
}
try (PreparedStatement preparedStatement = conn.prepareStatement(sql.toString())) {
preparedStatement.executeUpdate();
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
}
| optout of tests to narrow travis failure
| sqlg-core/src/main/java/org/umlg/sqlg/structure/SqlgGraph.java | optout of tests to narrow travis failure | <ide><path>qlg-core/src/main/java/org/umlg/sqlg/structure/SqlgGraph.java
<ide> @Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT)
<ide> @Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_INTEGRATE)
<ide> @Graph.OptIn(Graph.OptIn.SUITE_GROOVY_ENVIRONMENT_PERFORMANCE)
<add>
<add>//These are to debug travis
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldReadWriteModernToFileWithHelpers",
<add> reason = "travis hangs.")
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldReadWriteClassic",
<add> reason = "travis hangs.")
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldReadWriteModern",
<add> reason = "travis hangs.")
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldReadWriteClassicToFileWithHelpers",
<add> reason = "travis hangs.")
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldMigrateModernGraph",
<add> reason = "travis hangs.")
<add>@Graph.OptOut(
<add> test = "org.apache.tinkerpop.gremlin.structure.io.IoGraphTest",
<add> method = "shouldMigrateClassicGraph",
<add> reason = "travis hangs.")
<add>
<add>
<add>
<ide>
<ide> @Graph.OptOut(
<ide> test = "org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.ExplainTest$Traversals", |
|
Java | agpl-3.0 | 6594c543caf9ab0e8733a81d49c9eade0faeb7c9 | 0 | geothomasp/kcmit,UniversityOfHawaiiORS/kc,ColostateResearchServices/kc,UniversityOfHawaiiORS/kc,jwillia/kc-old1,kuali/kc,jwillia/kc-old1,iu-uits-es/kc,jwillia/kc-old1,mukadder/kc,kuali/kc,ColostateResearchServices/kc,iu-uits-es/kc,geothomasp/kcmit,UniversityOfHawaiiORS/kc,geothomasp/kcmit,jwillia/kc-old1,geothomasp/kcmit,mukadder/kc,kuali/kc,geothomasp/kcmit,ColostateResearchServices/kc,iu-uits-es/kc,mukadder/kc | /*
* Copyright 2006-2008 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.proposaldevelopment.rules;
import static org.kuali.kra.logging.FormattedLogger.info;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.kuali.RiceKeyConstants;
import org.kuali.core.bo.BusinessObject;
import org.kuali.core.document.Document;
import org.kuali.core.service.DataDictionaryService;
import org.kuali.core.service.KualiConfigurationService;
import org.kuali.core.util.ErrorMap;
import org.kuali.core.util.GlobalVariables;
import org.kuali.kra.bo.ValidSpecialReviewApproval;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KeyConstants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.proposaldevelopment.bo.NarrativeUserRights;
import org.kuali.kra.proposaldevelopment.bo.ProposalAbstract;
import org.kuali.kra.proposaldevelopment.bo.ProposalCopyCriteria;
import org.kuali.kra.proposaldevelopment.bo.ProposalLocation;
import org.kuali.kra.proposaldevelopment.bo.ProposalPerson;
import org.kuali.kra.proposaldevelopment.bo.ProposalPersonYnq;
import org.kuali.kra.proposaldevelopment.bo.ProposalSpecialReview;
import org.kuali.kra.proposaldevelopment.bo.ProposalUser;
import org.kuali.kra.proposaldevelopment.bo.ProposalUserEditRoles;
import org.kuali.kra.proposaldevelopment.bo.ProposalYnq;
import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument;
import org.kuali.kra.proposaldevelopment.rule.AbstractsRule;
import org.kuali.kra.proposaldevelopment.rule.AddInstituteAttachmentRule;
import org.kuali.kra.proposaldevelopment.rule.AddKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.AddNarrativeRule;
import org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentRule;
import org.kuali.kra.proposaldevelopment.rule.AddProposalLocationRule;
import org.kuali.kra.proposaldevelopment.rule.AddProposalSpecialReviewRule;
import org.kuali.kra.proposaldevelopment.rule.CalculateCreditSplitRule;
import org.kuali.kra.proposaldevelopment.rule.ChangeKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.CopyProposalRule;
import org.kuali.kra.proposaldevelopment.rule.NewNarrativeUserRightsRule;
import org.kuali.kra.proposaldevelopment.rule.PermissionsRule;
import org.kuali.kra.proposaldevelopment.rule.ProposalDataOverrideRule;
import org.kuali.kra.proposaldevelopment.rule.SaveKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.SaveNarrativesRule;
import org.kuali.kra.proposaldevelopment.rule.event.AddInstituteAttachmentEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddNarrativeEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddProposalLocationEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddProposalSpecialReviewEvent;
import org.kuali.kra.proposaldevelopment.rule.event.ChangeKeyPersonEvent;
import org.kuali.kra.proposaldevelopment.rule.event.ProposalDataOverrideEvent;
import org.kuali.kra.proposaldevelopment.rule.event.SaveNarrativesEvent;
import org.kuali.kra.proposaldevelopment.rule.event.SavePersonnelAttachmentEvent;
import org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService;
import org.kuali.kra.proposaldevelopment.web.bean.ProposalUserRoles;
import org.kuali.kra.rule.CustomAttributeRule;
import org.kuali.kra.rule.event.SaveCustomAttributeEvent;
import org.kuali.kra.rules.KraCustomAttributeRule;
import org.kuali.kra.rules.ResearchDocumentRuleBase;
/**
* Main Business Rule class for <code>{@link ProposalDevelopmentDocument}</code>. Responsible for delegating rules to independent rule classes.
*
* @see org.kuali.proposaldevelopment.rules.KeyPersonnelAuditRule
* @see org.kuali.proposaldevelopment.rules.PersonEditableFieldRule
* @see org.kuali.proposaldevelopment.rules.ProposalDevelopmentKeyPersonsRule
* @author Kuali Nervous System Team ([email protected])
*/
public class ProposalDevelopmentDocumentRule extends ResearchDocumentRuleBase implements AddKeyPersonRule, AddNarrativeRule,SaveNarrativesRule, AddInstituteAttachmentRule, AddPersonnelAttachmentRule, AddProposalLocationRule,AddProposalSpecialReviewRule , AbstractsRule, CopyProposalRule, ChangeKeyPersonRule, PermissionsRule, CustomAttributeRule, NewNarrativeUserRightsRule, SaveKeyPersonRule,CalculateCreditSplitRule, ProposalDataOverrideRule {
@Override
protected boolean processCustomRouteDocumentBusinessRules(Document document) {
boolean retval = true;
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
retval &= super.processCustomRouteDocumentBusinessRules(document);
retval &= processProposalPersonYNQBusinessRule(proposalDevelopmentDocument);
return retval;
}
@Override
protected boolean processCustomSaveDocumentBusinessRules(Document document) {
if (!(document instanceof ProposalDevelopmentDocument)) {
return false;
}
boolean valid = true;
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
GlobalVariables.getErrorMap().addToErrorPath("document");
// KRACOEUS-641: Changed CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME to false to prevent duplicate error messages
final boolean VALIDATION_REQUIRED = true;
final boolean CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME = false;
getDictionaryValidationService().validateDocumentAndUpdatableReferencesRecursively(document, getMaxDictionaryValidationDepth(), VALIDATION_REQUIRED, CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME);
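        // Each rule below runs even when an earlier one has already failed; the &= accumulation
        // lets every section report its errors in a single validation pass.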
valid &= processProposalRequiredFieldsBusinessRule(proposalDevelopmentDocument);
valid &= processOrganizationLocationBusinessRule(proposalDevelopmentDocument);
valid &= processSpecialReviewBusinessRule(proposalDevelopmentDocument);
valid &= processProposalYNQBusinessRule(proposalDevelopmentDocument, false);
valid &= processBudgetVersionsBusinessRule(proposalDevelopmentDocument.getBudgetVersionOverviews(), false);
valid &= processProposalGrantsGovBusinessRule(proposalDevelopmentDocument);
valid &= processSponsorProgramBusinessRule(proposalDevelopmentDocument);
GlobalVariables.getErrorMap().removeFromErrorPath("document");
return valid;
}
/**
     * This method validates 'Proposal Special Review'. It checks the
     * validSpecialReviewApproval table, and if there is a match, then checks
     * the protocolNumberFlag, applicationDateFlag, and approvalDateFlag settings.
*
* @param proposalDevelopmentDocument : The proposalDevelopmentDocument that is being validated
* @return valid Does the validation pass
*/
private boolean processSpecialReviewBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
for (ProposalSpecialReview propSpecialReview : proposalDevelopmentDocument.getPropSpecialReviews()) {
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
propSpecialReview.refreshReferenceObject("validSpecialReviewApproval");
if (StringUtils.isNotBlank(propSpecialReview.getApprovalTypeCode()) && StringUtils.isNotBlank(propSpecialReview.getSpecialReviewCode())) {
ValidSpecialReviewApproval validSpRevApproval = propSpecialReview.getValidSpecialReviewApproval();
if (validSpRevApproval != null) {
if (validSpRevApproval.isProtocolNumberFlag() && StringUtils.isBlank(propSpecialReview.getProtocolNumber())) {
valid = false;
errorMap.putError("protocolNumber", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Protocol Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isApplicationDateFlag() && propSpecialReview.getApplicationDate() == null) {
valid = false;
errorMap.putError("applicationDate", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW,
"Protocol Number", validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isApprovalDateFlag() && propSpecialReview.getApprovalDate() == null) {
valid = false;
errorMap.putError("approvalDate", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Protocol Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isExemptNumberFlag() && (propSpecialReview.getProposalExemptNumbers() == null || propSpecialReview.getProposalExemptNumbers().size() < 1)) {
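                        // Exempt-number errors are keyed from the document root, so the nested error
                        // path is unwound before putError and restored immediately afterwards.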
valid = false;
errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
errorMap.removeFromErrorPath("document");
errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Exempt Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
errorMap.addToErrorPath("document");
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
}
if (!validSpRevApproval.isExemptNumberFlag() && propSpecialReview.getProposalExemptNumbers() != null && propSpecialReview.getProposalExemptNumbers().size() > 0) {
valid = false;
errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
errorMap.removeFromErrorPath("document");
errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_EXEMPT_NUMBER_SELECTED,
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
errorMap.addToErrorPath("document");
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
}
} else {
// TODO : not sure if no valid sp set, and exempt# is selected, should this be an error ?
// if (propSpecialReview.getProposalExemptNumbers() != null && propSpecialReview.getProposalExemptNumbers().size() > 0) {
// valid = false;
// errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
// errorMap.removeFromErrorPath("document");
// propSpecialReview.refreshReferenceObject("specialReview");
// propSpecialReview.refreshReferenceObject("specialReviewApprovalType");
// errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_EXEMPT_NUMBER_SELECTED,
// propSpecialReview.getSpecialReview().getDescription() + "/"
// + propSpecialReview.getSpecialReviewApprovalType().getDescription());
// errorMap.addToErrorPath("document");
// errorMap.addToErrorPath("propSpecialReview[" + i + "]");
// }
}
}
if (propSpecialReview.getApplicationDate() !=null && propSpecialReview.getApprovalDate() != null && propSpecialReview.getApprovalDate().before(propSpecialReview.getApplicationDate())) {
errorMap.putError("approvalDate", KeyConstants.ERROR_APPROVAL_DATE_BEFORE_APPLICATION_DATE_SPECIALREVIEW,
"Approval Date","Application Date");
}
errorMap.removeFromErrorPath("propSpecialReview[" + i++ + "]");
}
return valid;
}
/**
*
* Validate proposal person questions rule.
* Answers are mandatory for routing
* @param proposalDevelopmentDocument
* @return
*/
public boolean processProposalPersonYNQBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
//checkErrors();
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
List<ProposalPerson> proposalPersons = proposalDevelopmentDocument.getInvestigators();
for (ProposalPerson proposalPerson : proposalPersons) {
List<ProposalPersonYnq> proposalPersonYnqs = proposalPerson.getProposalPersonYnqs();
String errorPath = "proposalPerson[" + i + "]";
errorMap.addToErrorPath(errorPath);
for (ProposalPersonYnq proposalPersonYnq : proposalPersonYnqs) {
/* look for answers - required for routing */
if(StringUtils.isBlank(proposalPersonYnq.getAnswer())) {
valid = false;
errorMap.putError("answer", KeyConstants.ERROR_REQUIRED_ANSWER);
}
}
errorMap.removeFromErrorPath(errorPath);
i++;
}
return valid;
}
/**
*
     * Validate proposal questions rule. Validates the explanation-required and date-required fields based on
     * the question configuration. Answers are mandatory for routing.
* @param proposalDevelopmentDocument
* @return
*/
public boolean processProposalYNQBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument, boolean docRouting) {
boolean valid = true;
//checkErrors();
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
if(!errorMap.getErrorPath().contains("document")) {
errorMap.clearErrorPath();
errorMap.addToErrorPath("document");
}
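        // Error keys combine the YNQ group name and the question index, e.g. proposalYnq[groupName][0].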
for (ProposalYnq proposalYnq : proposalDevelopmentDocument.getProposalYnqs()) {
String groupName = proposalYnq.getYnq().getGroupName();
String errorPath = "proposalYnq[" + groupName + "][" + i + "]";
errorMap.addToErrorPath(errorPath);
/* look for answers - required for routing */
if(docRouting && StringUtils.isBlank(proposalYnq.getAnswer())) {
valid = false;
errorMap.putError("answer", KeyConstants.ERROR_REQUIRED_ANSWER);
}
            /* look for date required */
if (StringUtils.isNotBlank(proposalYnq.getAnswer()) &&
proposalYnq.getAnswer().equalsIgnoreCase(proposalYnq.getYnq().getDateRequiredFor()) &&
proposalYnq.getReviewDate() == null
) {
valid = false;
errorMap.putError("reviewDate", KeyConstants.ERROR_REQUIRED_FOR_REVIEW_DATE);
}
            /* look for explanation required */
if (StringUtils.isNotBlank(proposalYnq.getAnswer()) &&
proposalYnq.getAnswer().equalsIgnoreCase(proposalYnq.getYnq().getExplanationRequiredFor()) &&
StringUtils.isBlank(proposalYnq.getExplanation())
) {
valid = false;
errorMap.putError("explanation", KeyConstants.ERROR_REQUIRED_FOR_EXPLANATION);
}
errorMap.removeFromErrorPath(errorPath);
i++;
}
return valid;
}
/**
* This method validates Required Fields related fields on
* the Proposal Development Document.
* @param proposalDevelopmentDocument document to validate
* @return boolean whether the validation passed or not
*/
private boolean processProposalRequiredFieldsBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
valid = validateProposalTypeField(proposalDevelopmentDocument);
proposalDevelopmentDocument.refreshReferenceObject("sponsor");
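        // A non-null sponsorCode with a null sponsor reference means the code does not
        // resolve to an existing Sponsor record.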
if (proposalDevelopmentDocument.getSponsorCode() != null && proposalDevelopmentDocument.getSponsor() == null) {
valid = false;
errorMap.putError("sponsorCode", KeyConstants.ERROR_MISSING, dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "sponsorCode"));
}
//if either is missing, it should be caught on the DD validation.
if (proposalDevelopmentDocument.getRequestedStartDateInitial() != null && proposalDevelopmentDocument.getRequestedEndDateInitial() != null) {
if (proposalDevelopmentDocument.getRequestedStartDateInitial().after(proposalDevelopmentDocument.getRequestedEndDateInitial())) {
valid = false;
errorMap.putError("requestedStartDateInitial", KeyConstants.ERROR_START_DATE_AFTER_END_DATE,
new String[] {dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "requestedStartDateInitial"),
dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "requestedEndDateInitial")});
}
}
return valid;
}
/**
* Validates business rules pertaining to the Proposal Type. The rules are:
*
* <ol>
* <li>If the Proposal Type is Renewal, Revision, or Continuation, then the
* Sponsor Proposal Id field must be assigned a value.</li>
* </ol>
*
* @param proposalDevelopmentDocument the Proposal Development Document
* @return true if valid; otherwise false (if false, the Global ErrorMap is populated)
*/
private boolean validateProposalTypeField(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
String proposalTypeCode = proposalDevelopmentDocument.getProposalTypeCode();
String sponsorProposalId = proposalDevelopmentDocument.getSponsorProposalNumber();
if (isProposalTypeRenewalRevisionContinuation(proposalTypeCode) && StringUtils.isEmpty(sponsorProposalId)) {
valid = false;
errorMap.putError("sponsorProposalNumber", KeyConstants.ERROR_REQUIRED_PROPOSAL_SPONSOR_ID, dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "sponsorProposalNumber"));
}
// TODO: Must add in other validations regarding awards, etc. see KRACOEUS-290.
return valid;
}
/**
* Is the Proposal Type set to Renewal, Revision, or a Continuation?
* @param proposalTypeCode proposal type code
* @return true or false
*/
private boolean isProposalTypeRenewalRevisionContinuation(String proposalTypeCode) {
String proposalTypeCodeRenewal = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_RENEWAL).getParameterValue();
String proposalTypeCodeRevision = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_REVISION).getParameterValue();
String proposalTypeCodeContinuation = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_CONTINUATION).getParameterValue();
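        // The renewal, revision, and continuation type codes are configurable system parameters,
        // not hard-coded values.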
return !StringUtils.isEmpty(proposalTypeCode) &&
(proposalTypeCode.equals(proposalTypeCodeRenewal) ||
proposalTypeCode.equals(proposalTypeCodeRevision) ||
proposalTypeCode.equals(proposalTypeCodeContinuation));
}
/**
*
     * Validate organization/location rule. Specifically, at least one location is required.
* @param proposalDevelopmentDocument
* @return
*/
private boolean processOrganizationLocationBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
if (proposalDevelopmentDocument.getOrganizationId()!=null && (proposalDevelopmentDocument.getProposalLocations().size()==0 ||
(proposalDevelopmentDocument.getProposalLocations().size()==1 && ((ProposalLocation)(proposalDevelopmentDocument.getProposalLocations().get(0))).getLocationSequenceNumber()==null))) {
GlobalVariables.getErrorMap().removeFromErrorPath("document");
reportError("newPropLocation.location", KeyConstants.ERROR_REQUIRED_FOR_PROPLOCATION);
GlobalVariables.getErrorMap().addToErrorPath("document");
valid = false;
}
return valid;
}
/**
*
* Validate Grants.gov business rules.
* @param proposalDevelopmentDocument
* @return
*/
private boolean processProposalGrantsGovBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
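        // When the S2S revision type is "Other" a non-blank description is required;
        // for any other revision type the description field must be left blank.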
if(proposalDevelopmentDocument.getS2sOpportunity()!= null && proposalDevelopmentDocument.getS2sOpportunity().getOpportunityId()!=null && StringUtils.equalsIgnoreCase(proposalDevelopmentDocument.getS2sOpportunity().getRevisionCode(), getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.S2S_REVISIONTYPE_OTHER).getParameterValue()) && (proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription()==null||StringUtils.equals(proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription().trim(), ""))){
reportError("s2sOpportunity.revisionOtherDescription",KeyConstants.ERROR_IF_REVISIONTYPE_IS_OTHER);
valid &= false;
}
if(proposalDevelopmentDocument.getS2sOpportunity()!= null && proposalDevelopmentDocument.getS2sOpportunity().getOpportunityId()!=null && !StringUtils.equalsIgnoreCase(proposalDevelopmentDocument.getS2sOpportunity().getRevisionCode(), getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT, KeyConstants.S2S_REVISIONTYPE_OTHER).getParameterValue()) && (proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription()!=null && !StringUtils.equals(proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription().trim(), ""))){
reportError("s2sOpportunity.revisionOtherDescription",KeyConstants.ERROR_IF_REVISIONTYPE_IS_NOT_OTHER_SPECIFY_NOT_BLANK);
valid &= false;
}
return valid;
}
public boolean processAddKeyPersonBusinessRules(ProposalDevelopmentDocument document, ProposalPerson person) {
return new ProposalDevelopmentKeyPersonsRule().processAddKeyPersonBusinessRules(document, person);
}
/**
     * Validate Sponsor/Program Information rule. Regex validation for the CFDA number: two digits, a period, three digits, and an optional trailing letter (e.g. 12.345 or 12.345A).
* @param proposalDevelopmentDocument
* @return
*/
private boolean processSponsorProgramBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
        String regExpr = "(\\d{2})(\\.)(\\d{3})[a-zA-Z]?";
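        // Accepts values such as "12.345" or "12.345A"; anything not matching the NN.NNN[letter] shape is flagged below.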
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
if(StringUtils.isNotBlank(proposalDevelopmentDocument.getCfdaNumber()) && !(proposalDevelopmentDocument.getCfdaNumber().matches(regExpr)) && GlobalVariables.getErrorMap().getMessages("document.cfdaNumber") == null)
{
errorMap.putError("cfdaNumber", RiceKeyConstants.ERROR_INVALID_FORMAT, new String []{dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "cfdaNumber"), proposalDevelopmentDocument.getCfdaNumber() });
valid = false;
}
return valid;
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.AddNarrativeRule#processAddNarrativeBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument,org.kuali.kra.proposaldevelopment.bo.Narrative)
*/
public boolean processAddNarrativeBusinessRules(AddNarrativeEvent addNarrativeEvent) {
        return new ProposalDevelopmentNarrativeRule().processAddNarrativeBusinessRules(addNarrativeEvent);
    }
/**
* @see org.kuali.core.rule.DocumentAuditRule#processRunAuditBusinessRules(org.kuali.core.document.Document)
*/
public boolean processRunAuditBusinessRules(Document document){
boolean retval = true;
retval &= super.processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentProposalRequiredFieldsAuditRule().processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentSponsorProgramInformationAuditRule().processRunAuditBusinessRules(document);
retval &= new KeyPersonnelAuditRule().processRunAuditBusinessRules(document);
//Change for KRACOEUS-1403
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
proposalDevelopmentDocument.getYnqService().populateProposalQuestions(proposalDevelopmentDocument.getProposalYnqs(), proposalDevelopmentDocument.getYnqGroupNames(), proposalDevelopmentDocument);
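        // Repopulate the proposal questions first so the routing-level YNQ check below sees the
        // complete, current set of questions (see the KRACOEUS-1403 note above).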
processProposalYNQBusinessRule((ProposalDevelopmentDocument) document, true);
retval &= new ProposalDevelopmentYnqAuditRule().processRunAuditBusinessRules(document);
//Change for KRACOEUS-1403 ends here
retval &= new ProposalSpecialReviewAuditRule().processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentGrantsGovAuditRule().processRunAuditBusinessRules(document);
// audit check for budgetversion with final status
try {
retval &= KraServiceLocator.getService(ProposalDevelopmentService.class).validateBudgetAuditRule((ProposalDevelopmentDocument)document);
} catch (Exception ex) {
// TODO : should log it here
throw new RuntimeException("Validate Budget Audit rules encountered exception", ex);
}
return retval;
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.AbstractsRule#processAddAbstractBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, org.kuali.kra.proposaldevelopment.bo.ProposalAbstract)
*/
public boolean processAddAbstractBusinessRules(ProposalDevelopmentDocument document, ProposalAbstract proposalAbstract) {
return new ProposalDevelopmentAbstractsRule().processAddAbstractBusinessRules(document, proposalAbstract);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.SaveNarrativesRule#processSaveNarrativesBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument)
*/
public boolean processSaveNarrativesBusinessRules(SaveNarrativesEvent saveNarrativesEvent) {
return new ProposalDevelopmentNarrativeRule().processSaveNarrativesBusinessRules(saveNarrativesEvent);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.CopyProposalRule#processCopyProposalBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, org.kuali.kra.proposaldevelopment.bo.ProposalCopyCriteria)
*/
public boolean processCopyProposalBusinessRules(ProposalDevelopmentDocument document, ProposalCopyCriteria criteria) {
return new ProposalDevelopmentCopyRule().processCopyProposalBusinessRules(document, criteria);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddInstituteAttachmentRule#processAddInstituteAttachmentBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddInstituteAttachmentEvent)
*/
public boolean processAddInstituteAttachmentBusinessRules(AddInstituteAttachmentEvent addInstituteAttachmentEvent) {
return new ProposalDevelopmentInstituteAttachmentRule().processAddInstituteAttachmentBusinessRules(addInstituteAttachmentEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentsRule#processAddPersonnelAttachmentsBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentsEvent)
*/
public boolean processAddPersonnelAttachmentBusinessRules(AddPersonnelAttachmentEvent addPersonnelAttachmentEvent) {
return new ProposalDevelopmentPersonnelAttachmentRule().processAddPersonnelAttachmentBusinessRules(addPersonnelAttachmentEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentsRule#processAddPersonnelAttachmentsBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentsEvent)
*/
public boolean processSavePersonnelAttachmentBusinessRules(SavePersonnelAttachmentEvent savePersonnelAttachmentEvent) {
return new ProposalDevelopmentPersonnelAttachmentRule().processSavePersonnelAttachmentBusinessRules(savePersonnelAttachmentEvent);
}
/**
* Delegating method for the <code>{@link ChangeKeyPersonRule}</code> which is triggered by the <code>{@link ChangeKeyPersonEvent}</code>
*
* @see org.kuali.kra.proposaldevelopment.rule.ChangeKeyPersonRule#processChangeKeyPersonBusinessRules(org.kuali.kra.proposaldevelopment.bo.ProposalPerson, org.kuali.core.bo.BusinessObject)
*/
public boolean processChangeKeyPersonBusinessRules(ProposalPerson proposalPerson, BusinessObject source,int index) {
return new ProposalDevelopmentKeyPersonsRule().processChangeKeyPersonBusinessRules(proposalPerson, source,index);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddProposalLocationRule#processAddProposalLocationBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddProposalLocationEvent)
*/
public boolean processAddProposalLocationBusinessRules(AddProposalLocationEvent addProposalLocationEvent) {
return new ProposalDevelopmentProposalLocationRule().processAddProposalLocationBusinessRules(addProposalLocationEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddProposalSpecialReviewRule#processAddProposalSpecialReviewBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddProposalSpecialReviewEvent)
*/
public boolean processAddProposalSpecialReviewBusinessRules(AddProposalSpecialReviewEvent addProposalSpecialReviewEvent) {
return new ProposalDevelopmentProposalSpecialReviewRule().processAddProposalSpecialReviewBusinessRules(addProposalSpecialReviewEvent);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processAddProposalUserBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, org.kuali.kra.proposaldevelopment.bo.ProposalUser)
*/
public boolean processAddProposalUserBusinessRules(ProposalDevelopmentDocument document,List<ProposalUserRoles> list, ProposalUser proposalUser) {
return new ProposalDevelopmentPermissionsRule().processAddProposalUserBusinessRules(document, list, proposalUser);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processDeleteProposalUserBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, int)
*/
public boolean processDeleteProposalUserBusinessRules(ProposalDevelopmentDocument document,List<ProposalUserRoles> list, int index) {
return new ProposalDevelopmentPermissionsRule().processDeleteProposalUserBusinessRules(document, list, index);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processEditProposalUserRolesBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, org.kuali.kra.proposaldevelopment.bo.ProposalUserEditRoles)
*/
public boolean processEditProposalUserRolesBusinessRules(ProposalDevelopmentDocument document, List<ProposalUserRoles> list, ProposalUserEditRoles editRoles) {
return new ProposalDevelopmentPermissionsRule().processEditProposalUserRolesBusinessRules(document, list, editRoles);
}
/**
     * Delegate to {@link org.kuali.kra.proposaldevelopment.rules.ProposalDevelopmentKeyPersonsRule#processSaveKeyPersonBusinessRules(ProposalDevelopmentDocument)}
*
* @see org.kuali.kra.proposaldevelopment.rule.SaveKeyPersonRule#processSaveKeyPersonBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument)
*/
public boolean processSaveKeyPersonBusinessRules(ProposalDevelopmentDocument document) {
info("In processSaveKeyPersonBusinessRules()");
return new ProposalDevelopmentKeyPersonsRule().processCustomSaveDocumentBusinessRules(document);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.NewNarrativeUserRightsRule#processNewNarrativeUserRightsBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, int)
*/
public boolean processNewNarrativeUserRightsBusinessRules(ProposalDevelopmentDocument document,
List<NarrativeUserRights> newNarrativeUserRights, int narrativeIndex) {
return new ProposalDevelopmentNarrativeRule().processNewNarrativeUserRightsBusinessRules(document, newNarrativeUserRights, narrativeIndex);
}
public boolean processCustomAttributeRules(SaveCustomAttributeEvent saveCustomAttributeEvent) {
return new KraCustomAttributeRule().processCustomAttributeRules(saveCustomAttributeEvent);
}
protected KualiConfigurationService getKualiConfigurationService(){
return KraServiceLocator.getService(KualiConfigurationService.class);
}
public boolean processCalculateCreditSplitBusinessRules(ProposalDevelopmentDocument document) {
return new ProposalDevelopmentKeyPersonsRule().processCalculateCreditSplitBusinessRules(document);
}
public boolean processProposalDataOverrideRules(ProposalDataOverrideEvent proposalDataOverrideEvent) {
return new ProposalDevelopmentDataOverrideRule().processProposalDataOverrideRules(proposalDataOverrideEvent);
}
}
| src/main/java/org/kuali/kra/proposaldevelopment/rules/ProposalDevelopmentDocumentRule.java | /*
* Copyright 2006-2008 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.proposaldevelopment.rules;
import static org.kuali.kra.logging.FormattedLogger.info;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.kuali.RiceKeyConstants;
import org.kuali.core.bo.BusinessObject;
import org.kuali.core.document.Document;
import org.kuali.core.service.DataDictionaryService;
import org.kuali.core.service.KualiConfigurationService;
import org.kuali.core.util.ErrorMap;
import org.kuali.core.util.GlobalVariables;
import org.kuali.kra.bo.ValidSpecialReviewApproval;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KeyConstants;
import org.kuali.kra.infrastructure.KraServiceLocator;
import org.kuali.kra.proposaldevelopment.bo.NarrativeUserRights;
import org.kuali.kra.proposaldevelopment.bo.ProposalAbstract;
import org.kuali.kra.proposaldevelopment.bo.ProposalCopyCriteria;
import org.kuali.kra.proposaldevelopment.bo.ProposalLocation;
import org.kuali.kra.proposaldevelopment.bo.ProposalPerson;
import org.kuali.kra.proposaldevelopment.bo.ProposalPersonYnq;
import org.kuali.kra.proposaldevelopment.bo.ProposalSpecialReview;
import org.kuali.kra.proposaldevelopment.bo.ProposalUser;
import org.kuali.kra.proposaldevelopment.bo.ProposalUserEditRoles;
import org.kuali.kra.proposaldevelopment.bo.ProposalYnq;
import org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument;
import org.kuali.kra.proposaldevelopment.rule.AbstractsRule;
import org.kuali.kra.proposaldevelopment.rule.AddInstituteAttachmentRule;
import org.kuali.kra.proposaldevelopment.rule.AddKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.AddNarrativeRule;
import org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentRule;
import org.kuali.kra.proposaldevelopment.rule.AddProposalLocationRule;
import org.kuali.kra.proposaldevelopment.rule.AddProposalSpecialReviewRule;
import org.kuali.kra.proposaldevelopment.rule.CalculateCreditSplitRule;
import org.kuali.kra.proposaldevelopment.rule.ChangeKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.CopyProposalRule;
import org.kuali.kra.proposaldevelopment.rule.NewNarrativeUserRightsRule;
import org.kuali.kra.proposaldevelopment.rule.PermissionsRule;
import org.kuali.kra.proposaldevelopment.rule.ProposalDataOverrideRule;
import org.kuali.kra.proposaldevelopment.rule.SaveKeyPersonRule;
import org.kuali.kra.proposaldevelopment.rule.SaveNarrativesRule;
import org.kuali.kra.proposaldevelopment.rule.event.AddInstituteAttachmentEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddNarrativeEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddProposalLocationEvent;
import org.kuali.kra.proposaldevelopment.rule.event.AddProposalSpecialReviewEvent;
import org.kuali.kra.proposaldevelopment.rule.event.ChangeKeyPersonEvent;
import org.kuali.kra.proposaldevelopment.rule.event.ProposalDataOverrideEvent;
import org.kuali.kra.proposaldevelopment.rule.event.SaveNarrativesEvent;
import org.kuali.kra.proposaldevelopment.rule.event.SavePersonnelAttachmentEvent;
import org.kuali.kra.proposaldevelopment.service.ProposalDevelopmentService;
import org.kuali.kra.proposaldevelopment.web.bean.ProposalUserRoles;
import org.kuali.kra.rule.CustomAttributeRule;
import org.kuali.kra.rule.event.SaveCustomAttributeEvent;
import org.kuali.kra.rules.KraCustomAttributeRule;
import org.kuali.kra.rules.ResearchDocumentRuleBase;
/**
* Main Business Rule class for <code>{@link ProposalDevelopmentDocument}</code>. Responsible for delegating rules to independent rule classes.
*
* @see org.kuali.proposaldevelopment.rules.KeyPersonnelAuditRule
* @see org.kuali.proposaldevelopment.rules.PersonEditableFieldRule
* @see org.kuali.proposaldevelopment.rules.ProposalDevelopmentKeyPersonsRule
* @author Kuali Nervous System Team ([email protected])
*/
public class ProposalDevelopmentDocumentRule extends ResearchDocumentRuleBase implements AddKeyPersonRule, AddNarrativeRule,SaveNarrativesRule, AddInstituteAttachmentRule, AddPersonnelAttachmentRule, AddProposalLocationRule,AddProposalSpecialReviewRule , AbstractsRule, CopyProposalRule, ChangeKeyPersonRule, PermissionsRule, CustomAttributeRule, NewNarrativeUserRightsRule, SaveKeyPersonRule,CalculateCreditSplitRule, ProposalDataOverrideRule {
@Override
protected boolean processCustomRouteDocumentBusinessRules(Document document) {
boolean retval = true;
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
retval &= super.processCustomRouteDocumentBusinessRules(document);
retval &= processProposalPersonYNQBusinessRule(proposalDevelopmentDocument);
return retval;
}
@Override
protected boolean processCustomSaveDocumentBusinessRules(Document document) {
if (!(document instanceof ProposalDevelopmentDocument)) {
return false;
}
boolean valid = true;
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
GlobalVariables.getErrorMap().addToErrorPath("document");
// KRACOEUS-641: Changed CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME to false to prevent duplicate error messages
final boolean VALIDATION_REQUIRED = true;
final boolean CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME = false;
getDictionaryValidationService().validateDocumentAndUpdatableReferencesRecursively(document, getMaxDictionaryValidationDepth(), VALIDATION_REQUIRED, CHOMP_LAST_LETTER_S_FROM_COLLECTION_NAME);
valid &= processProposalRequiredFieldsBusinessRule(proposalDevelopmentDocument);
valid &= processOrganizationLocationBusinessRule(proposalDevelopmentDocument);
valid &= processSpecialReviewBusinessRule(proposalDevelopmentDocument);
valid &= processProposalYNQBusinessRule(proposalDevelopmentDocument, false);
valid &= processBudgetVersionsBusinessRule(proposalDevelopmentDocument.getBudgetVersionOverviews(), false);
valid &= processProposalGrantsGovBusinessRule(proposalDevelopmentDocument);
valid &= processSponsorProgramBusinessRule(proposalDevelopmentDocument);
GlobalVariables.getErrorMap().removeFromErrorPath("document");
return valid;
}
/**
     * This method validates 'Proposal Special Review'. It checks the
     * validSpecialReviewApproval table, and if there is a match, then checks
     * the protocolNumberFlag, applicationDateFlag, and approvalDateFlag settings.
*
* @param proposalDevelopmentDocument : The proposalDevelopmentDocument that is being validated
* @return valid Does the validation pass
*/
private boolean processSpecialReviewBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
for (ProposalSpecialReview propSpecialReview : proposalDevelopmentDocument.getPropSpecialReviews()) {
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
propSpecialReview.refreshReferenceObject("validSpecialReviewApproval");
if (StringUtils.isNotBlank(propSpecialReview.getApprovalTypeCode()) && StringUtils.isNotBlank(propSpecialReview.getSpecialReviewCode())) {
ValidSpecialReviewApproval validSpRevApproval = propSpecialReview.getValidSpecialReviewApproval();
if (validSpRevApproval != null) {
if (validSpRevApproval.isProtocolNumberFlag() && StringUtils.isBlank(propSpecialReview.getProtocolNumber())) {
valid = false;
errorMap.putError("protocolNumber", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Protocol Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isApplicationDateFlag() && propSpecialReview.getApplicationDate() == null) {
valid = false;
errorMap.putError("applicationDate", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW,
"Protocol Number", validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isApprovalDateFlag() && propSpecialReview.getApprovalDate() == null) {
valid = false;
errorMap.putError("approvalDate", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Protocol Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
}
if (validSpRevApproval.isExemptNumberFlag() && (propSpecialReview.getProposalExemptNumbers() == null || propSpecialReview.getProposalExemptNumbers().size() < 1)) {
valid = false;
errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
errorMap.removeFromErrorPath("document");
errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_REQUIRED_FOR_VALID_SPECIALREVIEW, "Exempt Number",
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
errorMap.addToErrorPath("document");
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
}
if (!validSpRevApproval.isExemptNumberFlag() && propSpecialReview.getProposalExemptNumbers() != null && propSpecialReview.getProposalExemptNumbers().size() > 0) {
valid = false;
errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
errorMap.removeFromErrorPath("document");
errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_EXEMPT_NUMBER_SELECTED,
validSpRevApproval.getSpecialReview().getDescription() + "/"
+ validSpRevApproval.getSpecialReviewApprovalType().getDescription());
errorMap.addToErrorPath("document");
errorMap.addToErrorPath("propSpecialReview[" + i + "]");
}
} else {
// TODO : not sure if no valid sp set, and exempt# is selected, should this be an error ?
// if (propSpecialReview.getProposalExemptNumbers() != null && propSpecialReview.getProposalExemptNumbers().size() > 0) {
// valid = false;
// errorMap.removeFromErrorPath("propSpecialReview[" + i + "]");
// errorMap.removeFromErrorPath("document");
// propSpecialReview.refreshReferenceObject("specialReview");
// propSpecialReview.refreshReferenceObject("specialReviewApprovalType");
// errorMap.putError("documentExemptNumbers[" + i + "]", KeyConstants.ERROR_EXEMPT_NUMBER_SELECTED,
// propSpecialReview.getSpecialReview().getDescription() + "/"
// + propSpecialReview.getSpecialReviewApprovalType().getDescription());
// errorMap.addToErrorPath("document");
// errorMap.addToErrorPath("propSpecialReview[" + i + "]");
// }
}
}
if (propSpecialReview.getApplicationDate() !=null && propSpecialReview.getApprovalDate() != null && propSpecialReview.getApprovalDate().before(propSpecialReview.getApplicationDate())) {
errorMap.putError("approvalDate", KeyConstants.ERROR_APPROVAL_DATE_BEFORE_APPLICATION_DATE_SPECIALREVIEW,
"Approval Date","Application Date");
}
errorMap.removeFromErrorPath("propSpecialReview[" + i++ + "]");
}
return valid;
}
/**
*
* Validate proposal person questions rule.
* Answers are mandatory for routing
* @param proposalDevelopmentDocument
* @return
*/
public boolean processProposalPersonYNQBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
//checkErrors();
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
List<ProposalPerson> proposalPersons = proposalDevelopmentDocument.getInvestigators();
for (ProposalPerson proposalPerson : proposalPersons) {
List<ProposalPersonYnq> proposalPersonYnqs = proposalPerson.getProposalPersonYnqs();
String errorPath = "proposalPerson[" + i + "]";
errorMap.addToErrorPath(errorPath);
for (ProposalPersonYnq proposalPersonYnq : proposalPersonYnqs) {
/* look for answers - required for routing */
if(StringUtils.isBlank(proposalPersonYnq.getAnswer())) {
valid = false;
errorMap.putError("answer", KeyConstants.ERROR_REQUIRED_ANSWER);
}
}
errorMap.removeFromErrorPath(errorPath);
i++;
}
return valid;
}
/**
*
     * Validate proposal questions rule. Validates the explanation-required and date-required fields based on
     * the question configuration. Answers are mandatory for routing.
* @param proposalDevelopmentDocument
* @return
*/
public boolean processProposalYNQBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument, boolean docRouting) {
boolean valid = true;
//checkErrors();
ErrorMap errorMap = GlobalVariables.getErrorMap();
int i = 0;
if(!errorMap.getErrorPath().contains("document")) {
errorMap.clearErrorPath();
errorMap.addToErrorPath("document");
}
for (ProposalYnq proposalYnq : proposalDevelopmentDocument.getProposalYnqs()) {
String groupName = proposalYnq.getYnq().getGroupName();
String errorPath = "proposalYnq[" + groupName + "][" + i + "]";
errorMap.addToErrorPath(errorPath);
/* look for answers - required for routing */
if(docRouting && StringUtils.isBlank(proposalYnq.getAnswer())) {
valid = false;
errorMap.putError("answer", KeyConstants.ERROR_REQUIRED_ANSWER);
}
            /* look for date required */
if (StringUtils.isNotBlank(proposalYnq.getAnswer()) &&
proposalYnq.getAnswer().equalsIgnoreCase(proposalYnq.getYnq().getDateRequiredFor()) &&
proposalYnq.getReviewDate() == null
) {
valid = false;
errorMap.putError("reviewDate", KeyConstants.ERROR_REQUIRED_FOR_REVIEW_DATE);
}
            /* look for explanation required */
if (StringUtils.isNotBlank(proposalYnq.getAnswer()) &&
proposalYnq.getAnswer().equalsIgnoreCase(proposalYnq.getYnq().getExplanationRequiredFor()) &&
StringUtils.isBlank(proposalYnq.getExplanation())
) {
valid = false;
errorMap.putError("explanation", KeyConstants.ERROR_REQUIRED_FOR_EXPLANATION);
}
errorMap.removeFromErrorPath(errorPath);
i++;
}
return valid;
}
/**
* This method validates Required Fields related fields on
* the Proposal Development Document.
* @param proposalDevelopmentDocument document to validate
* @return boolean whether the validation passed or not
*/
private boolean processProposalRequiredFieldsBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
valid = validateProposalTypeField(proposalDevelopmentDocument);
proposalDevelopmentDocument.refreshReferenceObject("sponsor");
if (proposalDevelopmentDocument.getSponsorCode() != null && proposalDevelopmentDocument.getSponsor() == null) {
valid = false;
errorMap.putError("sponsorCode", KeyConstants.ERROR_MISSING, dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "sponsorCode"));
}
//if either is missing, it should be caught on the DD validation.
if (proposalDevelopmentDocument.getRequestedStartDateInitial() != null && proposalDevelopmentDocument.getRequestedEndDateInitial() != null) {
if (proposalDevelopmentDocument.getRequestedStartDateInitial().after(proposalDevelopmentDocument.getRequestedEndDateInitial())) {
valid = false;
errorMap.putError("requestedStartDateInitial", KeyConstants.ERROR_START_DATE_AFTER_END_DATE,
new String[] {dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "requestedStartDateInitial"),
dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "requestedEndDateInitial")});
}
}
return valid;
}
/**
* Validates business rules pertaining to the Proposal Type. The rules are:
*
* <ol>
* <li>If the Proposal Type is Renewal, Revision, or Continuation, then the
* Sponsor Proposal Id field must be assigned a value.</li>
* </ol>
*
* @param proposalDevelopmentDocument the Proposal Development Document
* @return true if valid; otherwise false (if false, the Global ErrorMap is populated)
*/
private boolean validateProposalTypeField(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
String proposalTypeCode = proposalDevelopmentDocument.getProposalTypeCode();
String sponsorProposalId = proposalDevelopmentDocument.getSponsorProposalNumber();
if (isProposalTypeRenewalRevisionContinuation(proposalTypeCode) && StringUtils.isEmpty(sponsorProposalId)) {
valid = false;
errorMap.putError("sponsorProposalNumber", KeyConstants.ERROR_REQUIRED_PROPOSAL_SPONSOR_ID, dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "sponsorProposalNumber"));
}
// TODO: Must add in other validations regarding awards, etc. see KRACOEUS-290.
return valid;
}
/**
* Is the Proposal Type set to Renewal, Revision, or a Continuation?
* @param proposalTypeCode proposal type code
* @return true or false
*/
private boolean isProposalTypeRenewalRevisionContinuation(String proposalTypeCode) {
String proposalTypeCodeRenewal = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_RENEWAL).getParameterValue();
String proposalTypeCodeRevision = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_REVISION).getParameterValue();
String proposalTypeCodeContinuation = getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.PROPOSALDEVELOPMENT_PROPOSALTYPE_CONTINUATION).getParameterValue();
return !StringUtils.isEmpty(proposalTypeCode) &&
(proposalTypeCode.equals(proposalTypeCodeRenewal) ||
proposalTypeCode.equals(proposalTypeCodeRevision) ||
proposalTypeCode.equals(proposalTypeCodeContinuation));
}
/**
*
     * Validate organization/location rule. Specifically, at least one location is required.
* @param proposalDevelopmentDocument
* @return
*/
private boolean processOrganizationLocationBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
if (proposalDevelopmentDocument.getOrganizationId()!=null && (proposalDevelopmentDocument.getProposalLocations().size()==0 ||
(proposalDevelopmentDocument.getProposalLocations().size()==1 && ((ProposalLocation)(proposalDevelopmentDocument.getProposalLocations().get(0))).getLocationSequenceNumber()==null))) {
reportError("newPropLocation.location", KeyConstants.ERROR_REQUIRED_FOR_PROPLOCATION);
valid = false;
}
return valid;
}
/**
*
* Validate Grants.gov business rules.
* @param proposalDevelopmentDocument
* @return
*/
private boolean processProposalGrantsGovBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
if(proposalDevelopmentDocument.getS2sOpportunity()!= null && proposalDevelopmentDocument.getS2sOpportunity().getOpportunityId()!=null && StringUtils.equalsIgnoreCase(proposalDevelopmentDocument.getS2sOpportunity().getRevisionCode(), getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT,KeyConstants.S2S_REVISIONTYPE_OTHER).getParameterValue()) && (proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription()==null||StringUtils.equals(proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription().trim(), ""))){
reportError("s2sOpportunity.revisionOtherDescription",KeyConstants.ERROR_IF_REVISIONTYPE_IS_OTHER);
valid &= false;
}
if(proposalDevelopmentDocument.getS2sOpportunity()!= null && proposalDevelopmentDocument.getS2sOpportunity().getOpportunityId()!=null && !StringUtils.equalsIgnoreCase(proposalDevelopmentDocument.getS2sOpportunity().getRevisionCode(), getKualiConfigurationService().getParameter(Constants.PARAMETER_MODULE_PROPOSAL_DEVELOPMENT, Constants.PARAMETER_COMPONENT_DOCUMENT, KeyConstants.S2S_REVISIONTYPE_OTHER).getParameterValue()) && (proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription()!=null && !StringUtils.equals(proposalDevelopmentDocument.getS2sOpportunity().getRevisionOtherDescription().trim(), ""))){
reportError("s2sOpportunity.revisionOtherDescription",KeyConstants.ERROR_IF_REVISIONTYPE_IS_NOT_OTHER_SPECIFY_NOT_BLANK);
valid &= false;
}
return valid;
}
public boolean processAddKeyPersonBusinessRules(ProposalDevelopmentDocument document, ProposalPerson person) {
return new ProposalDevelopmentKeyPersonsRule().processAddKeyPersonBusinessRules(document, person);
}
/**
     * Validate Sponsor/Program Information rule. Regex validation for the CFDA number: two digits, a period, three digits, and an optional trailing letter (e.g. 12.345 or 12.345A).
* @param proposalDevelopmentDocument
* @return
*/
private boolean processSponsorProgramBusinessRule(ProposalDevelopmentDocument proposalDevelopmentDocument) {
boolean valid = true;
        String regExpr = "(\\d{2})(\\.)(\\d{3})[a-zA-Z]?";
ErrorMap errorMap = GlobalVariables.getErrorMap();
DataDictionaryService dataDictionaryService = KraServiceLocator.getService(DataDictionaryService.class);
if(StringUtils.isNotBlank(proposalDevelopmentDocument.getCfdaNumber()) && !(proposalDevelopmentDocument.getCfdaNumber().matches(regExpr)) && GlobalVariables.getErrorMap().getMessages("document.cfdaNumber") == null)
{
errorMap.putError("cfdaNumber", RiceKeyConstants.ERROR_INVALID_FORMAT, new String []{dataDictionaryService.getAttributeErrorLabel(ProposalDevelopmentDocument.class, "cfdaNumber"), proposalDevelopmentDocument.getCfdaNumber() });
valid = false;
}
return valid;
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.AddNarrativeRule#processAddNarrativeBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument,org.kuali.kra.proposaldevelopment.bo.Narrative)
*/
public boolean processAddNarrativeBusinessRules(AddNarrativeEvent addNarrativeEvent) {
        return new ProposalDevelopmentNarrativeRule().processAddNarrativeBusinessRules(addNarrativeEvent);
    }
/**
* @see org.kuali.core.rule.DocumentAuditRule#processRunAuditBusinessRules(org.kuali.core.document.Document)
*/
public boolean processRunAuditBusinessRules(Document document){
boolean retval = true;
retval &= super.processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentProposalRequiredFieldsAuditRule().processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentSponsorProgramInformationAuditRule().processRunAuditBusinessRules(document);
retval &= new KeyPersonnelAuditRule().processRunAuditBusinessRules(document);
//Change for KRACOEUS-1403
ProposalDevelopmentDocument proposalDevelopmentDocument = (ProposalDevelopmentDocument) document;
proposalDevelopmentDocument.getYnqService().populateProposalQuestions(proposalDevelopmentDocument.getProposalYnqs(), proposalDevelopmentDocument.getYnqGroupNames(), proposalDevelopmentDocument);
processProposalYNQBusinessRule((ProposalDevelopmentDocument) document, true);
retval &= new ProposalDevelopmentYnqAuditRule().processRunAuditBusinessRules(document);
//Change for KRACOEUS-1403 ends here
retval &= new ProposalSpecialReviewAuditRule().processRunAuditBusinessRules(document);
retval &= new ProposalDevelopmentGrantsGovAuditRule().processRunAuditBusinessRules(document);
// audit check for budgetversion with final status
try {
retval &= KraServiceLocator.getService(ProposalDevelopmentService.class).validateBudgetAuditRule((ProposalDevelopmentDocument)document);
} catch (Exception ex) {
// TODO : should log it here
throw new RuntimeException("Validate Budget Audit rules encountered exception", ex);
}
return retval;
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.AbstractsRule#processAddAbstractBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, org.kuali.kra.proposaldevelopment.bo.ProposalAbstract)
*/
public boolean processAddAbstractBusinessRules(ProposalDevelopmentDocument document, ProposalAbstract proposalAbstract) {
return new ProposalDevelopmentAbstractsRule().processAddAbstractBusinessRules(document, proposalAbstract);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.SaveNarrativesRule#processSaveNarrativesBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument)
*/
public boolean processSaveNarrativesBusinessRules(SaveNarrativesEvent saveNarrativesEvent) {
return new ProposalDevelopmentNarrativeRule().processSaveNarrativesBusinessRules(saveNarrativesEvent);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.CopyProposalRule#processCopyProposalBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, org.kuali.kra.proposaldevelopment.bo.ProposalCopyCriteria)
*/
public boolean processCopyProposalBusinessRules(ProposalDevelopmentDocument document, ProposalCopyCriteria criteria) {
return new ProposalDevelopmentCopyRule().processCopyProposalBusinessRules(document, criteria);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddInstituteAttachmentRule#processAddInstituteAttachmentBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddInstituteAttachmentEvent)
*/
public boolean processAddInstituteAttachmentBusinessRules(AddInstituteAttachmentEvent addInstituteAttachmentEvent) {
return new ProposalDevelopmentInstituteAttachmentRule().processAddInstituteAttachmentBusinessRules(addInstituteAttachmentEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentsRule#processAddPersonnelAttachmentsBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentsEvent)
*/
public boolean processAddPersonnelAttachmentBusinessRules(AddPersonnelAttachmentEvent addPersonnelAttachmentEvent) {
return new ProposalDevelopmentPersonnelAttachmentRule().processAddPersonnelAttachmentBusinessRules(addPersonnelAttachmentEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddPersonnelAttachmentsRule#processAddPersonnelAttachmentsBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddPersonnelAttachmentsEvent)
*/
public boolean processSavePersonnelAttachmentBusinessRules(SavePersonnelAttachmentEvent savePersonnelAttachmentEvent) {
return new ProposalDevelopmentPersonnelAttachmentRule().processSavePersonnelAttachmentBusinessRules(savePersonnelAttachmentEvent);
}
/**
* Delegating method for the <code>{@link ChangeKeyPersonRule}</code> which is triggered by the <code>{@link ChangeKeyPersonEvent}</code>
*
* @see org.kuali.kra.proposaldevelopment.rule.ChangeKeyPersonRule#processChangeKeyPersonBusinessRules(org.kuali.kra.proposaldevelopment.bo.ProposalPerson, org.kuali.core.bo.BusinessObject)
*/
public boolean processChangeKeyPersonBusinessRules(ProposalPerson proposalPerson, BusinessObject source,int index) {
return new ProposalDevelopmentKeyPersonsRule().processChangeKeyPersonBusinessRules(proposalPerson, source,index);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddProposalLocationRule#processAddProposalLocationBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddProposalLocationEvent)
*/
public boolean processAddProposalLocationBusinessRules(AddProposalLocationEvent addProposalLocationEvent) {
return new ProposalDevelopmentProposalLocationRule().processAddProposalLocationBusinessRules(addProposalLocationEvent);
}
/**
*
* @see org.kuali.kra.proposaldevelopment.rule.AddProposalSpecialReviewRule#processAddProposalSpecialReviewBusinessRules(org.kuali.kra.proposaldevelopment.rule.event.AddProposalSpecialReviewEvent)
*/
public boolean processAddProposalSpecialReviewBusinessRules(AddProposalSpecialReviewEvent addProposalSpecialReviewEvent) {
return new ProposalDevelopmentProposalSpecialReviewRule().processAddProposalSpecialReviewBusinessRules(addProposalSpecialReviewEvent);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processAddProposalUserBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, org.kuali.kra.proposaldevelopment.bo.ProposalUser)
*/
public boolean processAddProposalUserBusinessRules(ProposalDevelopmentDocument document,List<ProposalUserRoles> list, ProposalUser proposalUser) {
return new ProposalDevelopmentPermissionsRule().processAddProposalUserBusinessRules(document, list, proposalUser);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processDeleteProposalUserBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, int)
*/
public boolean processDeleteProposalUserBusinessRules(ProposalDevelopmentDocument document,List<ProposalUserRoles> list, int index) {
return new ProposalDevelopmentPermissionsRule().processDeleteProposalUserBusinessRules(document, list, index);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.PermissionsRule#processEditProposalUserRolesBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, org.kuali.kra.proposaldevelopment.bo.ProposalUserEditRoles)
*/
public boolean processEditProposalUserRolesBusinessRules(ProposalDevelopmentDocument document, List<ProposalUserRoles> list, ProposalUserEditRoles editRoles) {
return new ProposalDevelopmentPermissionsRule().processEditProposalUserRolesBusinessRules(document, list, editRoles);
}
/**
     * Delegates to {@link org.kuali.kra.proposaldevelopment.rules.ProposalDevelopmentKeyPersonsRule#processCustomSaveDocumentBusinessRules(ProposalDevelopmentDocument)}.
*
* @see org.kuali.kra.proposaldevelopment.rule.SaveKeyPersonRule#processSaveKeyPersonBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument)
*/
public boolean processSaveKeyPersonBusinessRules(ProposalDevelopmentDocument document) {
info("In processSaveKeyPersonBusinessRules()");
return new ProposalDevelopmentKeyPersonsRule().processCustomSaveDocumentBusinessRules(document);
}
/**
* @see org.kuali.kra.proposaldevelopment.rule.NewNarrativeUserRightsRule#processNewNarrativeUserRightsBusinessRules(org.kuali.kra.proposaldevelopment.document.ProposalDevelopmentDocument, java.util.List, int)
*/
public boolean processNewNarrativeUserRightsBusinessRules(ProposalDevelopmentDocument document,
List<NarrativeUserRights> newNarrativeUserRights, int narrativeIndex) {
return new ProposalDevelopmentNarrativeRule().processNewNarrativeUserRightsBusinessRules(document, newNarrativeUserRights, narrativeIndex);
}
public boolean processCustomAttributeRules(SaveCustomAttributeEvent saveCustomAttributeEvent) {
return new KraCustomAttributeRule().processCustomAttributeRules(saveCustomAttributeEvent);
}
protected KualiConfigurationService getKualiConfigurationService(){
return KraServiceLocator.getService(KualiConfigurationService.class);
}
public boolean processCalculateCreditSplitBusinessRules(ProposalDevelopmentDocument document) {
return new ProposalDevelopmentKeyPersonsRule().processCalculateCreditSplitBusinessRules(document);
}
public boolean processProposalDataOverrideRules(ProposalDataOverrideEvent proposalDataOverrideEvent) {
return new ProposalDevelopmentDataOverrideRule().processProposalDataOverrideRules(proposalDataOverrideEvent);
}
}
| KRACOEUS-1783
| src/main/java/org/kuali/kra/proposaldevelopment/rules/ProposalDevelopmentDocumentRule.java | KRACOEUS-1783 | <ide><path>rc/main/java/org/kuali/kra/proposaldevelopment/rules/ProposalDevelopmentDocumentRule.java
<ide>
<ide> if (proposalDevelopmentDocument.getOrganizationId()!=null && (proposalDevelopmentDocument.getProposalLocations().size()==0 ||
<ide> (proposalDevelopmentDocument.getProposalLocations().size()==1 && ((ProposalLocation)(proposalDevelopmentDocument.getProposalLocations().get(0))).getLocationSequenceNumber()==null))) {
<add> GlobalVariables.getErrorMap().removeFromErrorPath("document");
<ide> reportError("newPropLocation.location", KeyConstants.ERROR_REQUIRED_FOR_PROPLOCATION);
<add> GlobalVariables.getErrorMap().addToErrorPath("document");
<ide> valid = false;
<ide> }
<ide> return valid; |
|
Java | apache-2.0 | ea8856732703400bb05f34f2440255d267068760 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.memory.agent;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.DebugProcessAdapterImpl;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.jdi.StackFrameProxyImpl;
import com.intellij.debugger.memory.agent.extractor.AgentExtractor;
import com.intellij.debugger.memory.ui.JavaReferenceInfo;
import com.intellij.debugger.memory.ui.SizedReferenceInfo;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.debugger.ui.JavaDebuggerSupport;
import com.intellij.execution.ExecutionListener;
import com.intellij.execution.ExecutionManager;
import com.intellij.execution.JavaExecutionUtil;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.executors.DefaultDebugExecutor;
import com.intellij.execution.impl.ConsoleViewImpl;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JdkUtil;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import one.util.streamex.IntStreamEx;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.io.File;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.jar.Attributes;
public class MemoryAgentUtil {
private static final Logger LOG = Logger.getInstance(MemoryAgentUtil.class);
private static final Key<Boolean> LISTEN_MEMORY_AGENT_STARTUP_FAILED = Key.create("LISTEN_MEMORY_AGENT_STARTUP_FAILED");
private static final int ESTIMATE_OBJECTS_SIZE_LIMIT = 2000;
private static final AtomicBoolean LISTENER_ADDED = new AtomicBoolean(false);
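  /**
   * Appends the {@code -agentpath} VM parameter for the native memory agent to the given
   * parameters. Does nothing when the agent is disabled in the debugger settings, when an IBM
   * JDK is used, when a memory agent parameter is already present, or when the agent binary
   * cannot be extracted.
   */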
public static void addMemoryAgent(@NotNull JavaParameters parameters) {
if (!DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT) {
return;
}
if (isIbmJdk(parameters)) {
LOG.info("Do not attach memory agent for IBM jdk");
return;
}
ParametersList parametersList = parameters.getVMParametersList();
if (parametersList.getParameters().stream().anyMatch(x -> x.contains("memory_agent"))) return;
boolean isInDebugMode = Registry.is("debugger.memory.agent.debug");
File agentFile = null;
String errorMessage = null;
long start = System.currentTimeMillis();
try {
agentFile = getAgentFile(isInDebugMode);
}
catch (InterruptedException e) {
errorMessage = "Interrupted";
}
catch (ExecutionException e) {
LOG.warn(e.getCause());
errorMessage = "Exception thrown (see logs for details)";
}
catch (TimeoutException e) {
errorMessage = "Timeout";
}
if (errorMessage != null || agentFile == null) {
LOG.warn("Could not extract agent: " + errorMessage);
return;
}
LOG.info("Memory agent extracting took " + (System.currentTimeMillis() - start) + " ms");
String path = JavaExecutionUtil.handleSpacesInAgentPath(agentFile.getAbsolutePath(), "debugger-memory-agent", null);
if (path == null) {
LOG.error("Could not use memory agent file. Spaces are found.");
return;
}
String args = "";
if (isInDebugMode) {
args = "5";// Enable debug messages
}
path += "=" + args;
parametersList.add("-agentpath:" + path);
listenIfStartupFailed();
}
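  /**
   * Tries to estimate the sizes of the given objects via the memory agent. Returns the original
   * list unchanged when no capable agent is available, when the list is larger than
   * {@link #ESTIMATE_OBJECTS_SIZE_LIMIT}, or when evaluation fails; otherwise returns
   * {@link SizedReferenceInfo} wrappers sorted by size in descending order.
   */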
public static List<JavaReferenceInfo> tryCalculateSizes(@NotNull List<JavaReferenceInfo> objects, @Nullable MemoryAgent agent) {
if (agent == null || !agent.canEvaluateObjectsSizes()) return objects;
if (objects.size() > ESTIMATE_OBJECTS_SIZE_LIMIT) {
LOG.info("Too many objects to estimate their sizes");
return objects;
}
try {
long[] sizes = agent.evaluateObjectsSizes(ContainerUtil.map(objects, x -> x.getObjectReference()));
return IntStreamEx.range(0, objects.size())
.mapToObj(i -> new SizedReferenceInfo(objects.get(i).getObjectReference(), sizes[i]))
.reverseSorted(Comparator.comparing(x -> x.size()))
.map(x -> (JavaReferenceInfo)x)
.toList();
}
catch (EvaluateException e) {
LOG.error("Could not estimate objects sizes", e);
}
return objects;
}
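  /**
   * Registers a debug process listener that loads the memory agent proxy when the debugged
   * process pauses and hands it to {@code agentLoaded}; the listener unregisters itself after a
   * successful load.
   */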
public static void loadAgentProxy(@NotNull DebugProcessImpl debugProcess, @NotNull Consumer<MemoryAgent> agentLoaded) {
debugProcess.addDebugProcessListener(new DebugProcessAdapterImpl() {
private final AtomicBoolean isInitializing = new AtomicBoolean(false);
@Override
public void paused(SuspendContextImpl suspendContext) {
if (isInitializing.compareAndSet(false, true)) {
try {
MemoryAgent memoryAgent = initMemoryAgent(suspendContext);
if (memoryAgent == null) {
LOG.warn("Could not initialize memory agent.");
return;
}
agentLoaded.accept(memoryAgent);
debugProcess.removeDebugProcessListener(this);
}
finally {
isInitializing.set(false);
}
}
}
@Nullable
private MemoryAgent initMemoryAgent(@NotNull SuspendContextImpl suspendContext) {
if (!DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT) {
LOG.info("Memory agent disabled");
return AgentLoader.DEFAULT_PROXY;
}
StackFrameProxyImpl frameProxy = suspendContext.getFrameProxy();
if (frameProxy == null) {
LOG.warn("frame proxy is not available");
return null;
}
long start = System.currentTimeMillis();
EvaluationContextImpl evaluationContext = new EvaluationContextImpl(suspendContext, frameProxy);
MemoryAgent agent = new AgentLoader().load(evaluationContext, debugProcess.getVirtualMachineProxy());
LOG.info("Memory agent loading took " + (System.currentTimeMillis() - start) + " ms");
return agent;
}
});
}
private static boolean isIbmJdk(@NotNull JavaParameters parameters) {
Sdk jdk = parameters.getJdk();
String vendor = jdk == null ? null : JdkUtil.getJdkMainAttribute(jdk, Attributes.Name.IMPLEMENTATION_VENDOR);
return vendor != null && StringUtil.containsIgnoreCase(vendor, "ibm");
}
private static File getAgentFile(boolean isInDebugMode) throws InterruptedException, ExecutionException, TimeoutException {
if (isInDebugMode) {
String debugAgentPath = Registry.get("debugger.memory.agent.debug.path").asString();
if (!debugAgentPath.isEmpty()) {
LOG.info("Local memory agent will be used: " + debugAgentPath);
return new File(debugAgentPath);
}
}
return ApplicationManager.getApplication()
.executeOnPooledThread(() -> new AgentExtractor().extract()).get(1, TimeUnit.SECONDS);
}
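  /**
   * Subscribes (once per project) to execution events and, when a debug process terminates with
   * a non-zero exit code and its console output mentions the memory agent, reports the failure
   * with a link that lets the user disable the agent.
   */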
private static void listenIfStartupFailed() {
Project project = JavaDebuggerSupport.getContextProjectForEditorFieldsInDebuggerConfigurables();
if (project == null || Boolean.TRUE.equals(project.getUserData(LISTEN_MEMORY_AGENT_STARTUP_FAILED))) return;
project.getMessageBus().connect().subscribe(ExecutionManager.EXECUTION_TOPIC, new ExecutionListener() {
@Override
public void processTerminated(@NotNull String executorId,
@NotNull ExecutionEnvironment env,
@NotNull ProcessHandler handler,
int exitCode) {
        if (!DefaultDebugExecutor.EXECUTOR_ID.equals(executorId) || exitCode == 0) return;
RunContentDescriptor content = env.getContentToReuse();
if (content == null) return;
ExecutionConsole console = content.getExecutionConsole();
if (!(console instanceof ConsoleViewImpl)) return;
ConsoleViewImpl consoleView = (ConsoleViewImpl)console;
ApplicationManager.getApplication().invokeLater(() -> {
if (consoleView.hasDeferredOutput()) {
consoleView.flushDeferredText();
}
String[] outputLines = StringUtil.splitByLines(consoleView.getText());
List<String> mentions = StreamEx.of(outputLines).skip(1).filter(x -> x.contains("memory_agent")).limit(10).toList();
if (outputLines.length >= 1 && outputLines[0].contains("memory_agent") && !mentions.isEmpty()) {
Project project = env.getProject();
String name = env.getRunProfile().getName();
String windowId = ExecutionManager.getInstance(project).getContentManager().getToolWindowIdByEnvironment(env);
Attachment[] mentionsInOutput = StreamEx.of(mentions).map(x -> new Attachment("agent_mention.txt", x))
.toArray(new Attachment[0]);
RuntimeExceptionWithAttachments exception =
new RuntimeExceptionWithAttachments("Could not start debug process with memory agent", mentionsInOutput);
String checkboxName = DebuggerBundle.message("label.debugger.general.configurable.enable.memory.agent");
String description =
"Memory agent could not be loaded. <a href=\"Disable\">Disable</a> the agent. To enable it back use \"" +
DebuggerBundle.message("label.debugger.general.configurable.enable.memory.agent") +
"\" option in File | Settings | Build, Execution, Deployment | Debugger";
ExecutionUtil.handleExecutionError(project, windowId, name, exception, description, new HyperlinkListener() {
@Override
public void hyperlinkUpdate(HyperlinkEvent e) {
if (HyperlinkEvent.EventType.ACTIVATED.equals(e.getEventType())) {
DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT = false;
}
}
});
LOG.error(exception);
}
});
}
});
project.putUserData(LISTEN_MEMORY_AGENT_STARTUP_FAILED, true);
}
}
| java/debugger/impl/src/com/intellij/debugger/memory/agent/MemoryAgentUtil.java | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.memory.agent;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.DebugProcessAdapterImpl;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.jdi.StackFrameProxyImpl;
import com.intellij.debugger.memory.agent.extractor.AgentExtractor;
import com.intellij.debugger.memory.ui.JavaReferenceInfo;
import com.intellij.debugger.memory.ui.SizedReferenceInfo;
import com.intellij.debugger.settings.DebuggerSettings;
import com.intellij.debugger.ui.JavaDebuggerSupport;
import com.intellij.execution.ExecutionListener;
import com.intellij.execution.ExecutionManager;
import com.intellij.execution.JavaExecutionUtil;
import com.intellij.execution.configurations.JavaParameters;
import com.intellij.execution.configurations.ParametersList;
import com.intellij.execution.executors.DefaultDebugExecutor;
import com.intellij.execution.impl.ConsoleViewImpl;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.ExecutionConsole;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Attachment;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.JdkUtil;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import one.util.streamex.IntStreamEx;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkListener;
import java.io.File;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
import java.util.jar.Attributes;
public class MemoryAgentUtil {
private static final Logger LOG = Logger.getInstance(MemoryAgentUtil.class);
private static final Key<Boolean> LISTEN_MEMORY_AGENT_STARTUP_FAILED = Key.create("LISTEN_MEMORY_AGENT_STARTUP_FAILED");
private static final int ESTIMATE_OBJECTS_SIZE_LIMIT = 2000;
private static final AtomicBoolean LISTENER_ADDED = new AtomicBoolean(false);
public static void addMemoryAgent(@NotNull JavaParameters parameters) {
if (!DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT) {
return;
}
if (isIbmJdk(parameters)) {
LOG.info("Do not attach memory agent for IBM jdk");
return;
}
ParametersList parametersList = parameters.getVMParametersList();
if (parametersList.getParameters().stream().anyMatch(x -> x.contains("memory_agent"))) return;
boolean isInDebugMode = Registry.is("debugger.memory.agent.debug");
File agentFile = null;
String errorMessage = null;
long start = System.currentTimeMillis();
try {
agentFile = getAgentFile(isInDebugMode);
}
catch (InterruptedException e) {
errorMessage = "Interrupted";
}
catch (ExecutionException e) {
LOG.warn(e.getCause());
errorMessage = "Exception thrown (see logs for details)";
}
catch (TimeoutException e) {
errorMessage = "Timeout";
}
if (errorMessage != null || agentFile == null) {
LOG.warn("Could not extract agent: " + errorMessage);
return;
}
LOG.info("Memory agent extracting took " + (System.currentTimeMillis() - start) + " ms");
String path = JavaExecutionUtil.handleSpacesInAgentPath(agentFile.getAbsolutePath(), "debugger-memory-agent", null);
if (path == null) {
LOG.error("Could not use memory agent file. Spaces are found.");
return;
}
String args = "";
if (isInDebugMode) {
args = "5";// Enable debug messages
}
path += "=" + args;
parametersList.add("-agentpath:" + path);
listenIfStartupFailed();
}
public static List<JavaReferenceInfo> tryCalculateSizes(@NotNull List<JavaReferenceInfo> objects, @Nullable MemoryAgent agent) {
if (agent == null || !agent.canEvaluateObjectsSizes()) return objects;
if (objects.size() > ESTIMATE_OBJECTS_SIZE_LIMIT) {
LOG.info("Too many objects to estimate their sizess");
return objects;
}
try {
long[] sizes = agent.evaluateObjectsSizes(ContainerUtil.map(objects, x -> x.getObjectReference()));
return IntStreamEx.range(0, objects.size())
.mapToObj(i -> new SizedReferenceInfo(objects.get(i).getObjectReference(), sizes[i]))
.reverseSorted(Comparator.comparing(x -> x.size()))
.map(x -> (JavaReferenceInfo)x)
.toList();
}
catch (EvaluateException e) {
LOG.error("Could not estimate objects sizes");
}
return objects;
}
public static void loadAgentProxy(@NotNull DebugProcessImpl debugProcess, @NotNull Consumer<MemoryAgent> agentLoaded) {
debugProcess.addDebugProcessListener(new DebugProcessAdapterImpl() {
private final AtomicBoolean isInitializing = new AtomicBoolean(false);
@Override
public void paused(SuspendContextImpl suspendContext) {
if (isInitializing.compareAndSet(false, true)) {
try {
MemoryAgent memoryAgent = initMemoryAgent(suspendContext);
if (memoryAgent == null) {
LOG.warn("Could not initialize memory agent.");
return;
}
agentLoaded.accept(memoryAgent);
debugProcess.removeDebugProcessListener(this);
}
finally {
isInitializing.set(false);
}
}
}
@Nullable
private MemoryAgent initMemoryAgent(@NotNull SuspendContextImpl suspendContext) {
if (!DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT) {
LOG.info("Memory agent disabled");
return AgentLoader.DEFAULT_PROXY;
}
StackFrameProxyImpl frameProxy = suspendContext.getFrameProxy();
if (frameProxy == null) {
LOG.warn("frame proxy is not available");
return null;
}
long start = System.currentTimeMillis();
EvaluationContextImpl evaluationContext = new EvaluationContextImpl(suspendContext, frameProxy);
MemoryAgent agent = new AgentLoader().load(evaluationContext, debugProcess.getVirtualMachineProxy());
LOG.info("Memory agent loading took " + (System.currentTimeMillis() - start) + " ms");
return agent;
}
});
}
private static boolean isIbmJdk(@NotNull JavaParameters parameters) {
Sdk jdk = parameters.getJdk();
String vendor = jdk == null ? null : JdkUtil.getJdkMainAttribute(jdk, Attributes.Name.IMPLEMENTATION_VENDOR);
return vendor != null && StringUtil.containsIgnoreCase(vendor, "ibm");
}
private static File getAgentFile(boolean isInDebugMode) throws InterruptedException, ExecutionException, TimeoutException {
if (isInDebugMode) {
String debugAgentPath = Registry.get("debugger.memory.agent.debug.path").asString();
if (!debugAgentPath.isEmpty()) {
return new File(debugAgentPath);
}
}
return ApplicationManager.getApplication()
.executeOnPooledThread(() -> new AgentExtractor().extract()).get(1, TimeUnit.SECONDS);
}
private static void listenIfStartupFailed() {
Project project = JavaDebuggerSupport.getContextProjectForEditorFieldsInDebuggerConfigurables();
if (project == null || Boolean.TRUE.equals(project.getUserData(LISTEN_MEMORY_AGENT_STARTUP_FAILED))) return;
project.getMessageBus().connect().subscribe(ExecutionManager.EXECUTION_TOPIC, new ExecutionListener() {
@Override
public void processTerminated(@NotNull String executorId,
@NotNull ExecutionEnvironment env,
@NotNull ProcessHandler handler,
int exitCode) {
        if (!DefaultDebugExecutor.EXECUTOR_ID.equals(executorId) || exitCode == 0) return;
RunContentDescriptor content = env.getContentToReuse();
if (content == null) return;
ExecutionConsole console = content.getExecutionConsole();
if (!(console instanceof ConsoleViewImpl)) return;
ConsoleViewImpl consoleView = (ConsoleViewImpl)console;
ApplicationManager.getApplication().invokeLater(() -> {
if (consoleView.hasDeferredOutput()) {
consoleView.flushDeferredText();
}
String[] outputLines = StringUtil.splitByLines(consoleView.getText());
List<String> mentions = StreamEx.of(outputLines).skip(1).filter(x -> x.contains("memory_agent")).limit(10).toList();
if (outputLines.length >= 1 && outputLines[0].contains("memory_agent") && !mentions.isEmpty()) {
Project project = env.getProject();
String name = env.getRunProfile().getName();
String windowId = ExecutionManager.getInstance(project).getContentManager().getToolWindowIdByEnvironment(env);
Attachment[] mentionsInOutput = StreamEx.of(mentions).map(x -> new Attachment("agent_mention.txt", x))
.toArray(new Attachment[0]);
RuntimeExceptionWithAttachments exception =
new RuntimeExceptionWithAttachments("Could not start debug process with memory agent", mentionsInOutput);
String checkboxName = DebuggerBundle.message("label.debugger.general.configurable.enable.memory.agent");
String description =
"Memory agent could not be loaded. <a href=\"Disable\">Disable</a> the agent. To enable it back use \"" +
DebuggerBundle.message("label.debugger.general.configurable.enable.memory.agent") +
"\" option in File | Settings | Build, Execution, Deployment | Debugger";
ExecutionUtil.handleExecutionError(project, windowId, name, exception, description, new HyperlinkListener() {
@Override
public void hyperlinkUpdate(HyperlinkEvent e) {
if (HyperlinkEvent.EventType.ACTIVATED.equals(e.getEventType())) {
DebuggerSettings.getInstance().ENABLE_MEMORY_AGENT = false;
}
}
});
LOG.error(exception);
}
});
}
});
project.putUserData(LISTEN_MEMORY_AGENT_STARTUP_FAILED, true);
}
}
| [memory-agent] Minor: improve logging
| java/debugger/impl/src/com/intellij/debugger/memory/agent/MemoryAgentUtil.java | [memory-agent] Minor: improve logging | <ide><path>ava/debugger/impl/src/com/intellij/debugger/memory/agent/MemoryAgentUtil.java
<ide> public static List<JavaReferenceInfo> tryCalculateSizes(@NotNull List<JavaReferenceInfo> objects, @Nullable MemoryAgent agent) {
<ide> if (agent == null || !agent.canEvaluateObjectsSizes()) return objects;
<ide> if (objects.size() > ESTIMATE_OBJECTS_SIZE_LIMIT) {
<del> LOG.info("Too many objects to estimate their sizess");
<add> LOG.info("Too many objects to estimate their sizes");
<ide> return objects;
<ide> }
<ide> try {
<ide> .toList();
<ide> }
<ide> catch (EvaluateException e) {
<del> LOG.error("Could not estimate objects sizes");
<add> LOG.error("Could not estimate objects sizes", e);
<ide> }
<ide>
<ide> return objects;
<ide> if (isInDebugMode) {
<ide> String debugAgentPath = Registry.get("debugger.memory.agent.debug.path").asString();
<ide> if (!debugAgentPath.isEmpty()) {
<add> LOG.info("Local memory agent will be used: " + debugAgentPath);
<ide> return new File(debugAgentPath);
<ide> }
<ide> } |
|
Java | epl-1.0 | 97e97683e446426f2260d01b0b08e6693322f641 | 0 | planetguy32/ForgeEssentials,Techjar/ForgeEssentials,ForgeEssentials/ForgeEssentialsMain,CityOfLearning/ForgeEssentials,liachmodded/ForgeEssentials | package com.forgeessentials.auth;
import java.util.UUID;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraftforge.event.CommandEvent;
import net.minecraftforge.event.ServerChatEvent;
import net.minecraftforge.event.entity.item.ItemTossEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import net.minecraftforge.event.entity.minecart.MinecartInteractEvent;
import net.minecraftforge.event.entity.player.AttackEntityEvent;
import net.minecraftforge.event.entity.player.EntityInteractEvent;
import net.minecraftforge.event.entity.player.EntityItemPickupEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.permissions.PermissionsManager;
import com.forgeessentials.core.ForgeEssentials;
import com.forgeessentials.util.OutputHandler;
import com.forgeessentials.util.events.PlayerMoveEvent;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.eventhandler.EventPriority;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.PlayerEvent;
public class AuthEventHandler
{
public static String banned;
public static String notvip;
public static String notwhitelisted;
public static boolean whitelist;
public static int vipslots;
public static int offset;
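    // counter tracks players currently occupying non-reserved slots; maxcounter is the number of
    // non-reserved slots, recomputed on each login from the server's max player count.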
public int counter;
public int maxcounter;
public AuthEventHandler()
{
ForgeEssentials.log.info("FEauth initialized. Enabled: " + ModuleAuth.isEnabled());
}
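    // Most of the handlers below follow the same pattern: while the acting player has no
    // authenticated session, the event is cancelled and the player is asked to log in.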
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerMove(PlayerMoveEvent event)
{
if (!(event.entityPlayer instanceof EntityPlayerMP))
return;
if (event.before.getX() == event.after.getX() && event.before.getZ() == event.after.getZ())
{
return;
}
if (ModuleAuth.canMoveWithoutLogin)
{
return;
}
if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerChat(ServerChatEvent event)
{
if (!(event.player instanceof EntityPlayerMP))
return;
UUID username = event.player.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerCommand(CommandEvent event)
{
if (!(event.sender instanceof EntityPlayerMP))
return;
if (!(event.sender instanceof EntityPlayer))
{
return;
}
EntityPlayer player = (EntityPlayer) event.sender;
if (!ModuleAuth.hasSession.contains(player.getPersistentID()) && !(event.command instanceof CommandAuth))
{
event.setCanceled(true);
OutputHandler.chatError(player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(PlayerInteractEvent event)
{
if (!(event.entityPlayer instanceof EntityPlayerMP))
return;
if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(EntityInteractEvent event)
{
if (!(event.entityPlayer instanceof EntityPlayerMP))
return;
if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(MinecartInteractEvent event)
{
if (!(event.player instanceof EntityPlayerMP))
return;
if (!ModuleAuth.hasSession.contains(event.player.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerTossItem(ItemTossEvent event)
{
if (!(event.player instanceof EntityPlayerMP))
return;
boolean cancel = false;
if (!ModuleAuth.hasSession.contains(event.player.getPersistentID()))
{
cancel = true;
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
if (cancel)
{
// add the item back to the inventory
ItemStack stack = event.entityItem.getEntityItem();
event.player.inventory.addItemStackToInventory(stack);
event.setCanceled(cancel);
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerPickupItem(EntityItemPickupEvent event)
{
if (!(event.entityPlayer instanceof EntityPlayerMP))
return;
if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerHurt(LivingHurtEvent event)
{
if (!(event.entityLiving instanceof EntityPlayerMP))
return;
EntityPlayerMP player = (EntityPlayerMP) event.entityLiving;
if (!ModuleAuth.hasSession.contains(player.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerAttack(AttackEntityEvent event)
{
if (!(event.entityPlayer instanceof EntityPlayerMP))
return;
if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
/*
* @SubscribeEvent(priority = EventPriority.HIGHEST) public void onPlayerOpenContainer(PlayerOpenContainerEvent
* event) { UUID username = event.entityPlayer.getPersistentID();
*
* if (!ModuleAuth.hasSession.contains(username)) { event.setResult(Result.DENY);
* OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help."); } }
*/
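    /**
     * On login, prompts the player to register or log in, enforces the whitelist, and reserves
     * the configured number of VIP slots: once the non-reserved slots (max players minus
     * vipslots minus offset) are filled, players without the VIP permission are kicked.
     */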
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onLogin(PlayerEvent.PlayerLoggedInEvent e)
{
if (!ModuleAuth.isEnabled())
{
return;
}
if (!PlayerPassData.isRegistered(e.player.getPersistentID()))
{
OutputHandler.chatError(e.player, "Registration required. Try /auth help.");
}
else
{
OutputHandler.chatError(e.player, "Login required. Try /auth help.");
}
maxcounter = FMLCommonHandler.instance().getMinecraftServerInstance().getMaxPlayers() - vipslots - offset;
if (whitelist)
{
if (!PermissionsManager.checkPermission(e.player, "fe.auth.isWhiteListed"))
{
((EntityPlayerMP) e.player).playerNetServerHandler.kickPlayerFromServer(notwhitelisted);
}
}
if (PermissionsManager.checkPermission(e.player, "fe.auth.isVIP"))
{
return;
}
else if (counter == maxcounter)
{
((EntityPlayerMP) e.player).playerNetServerHandler.kickPlayerFromServer(notvip);
}
else
{
counter = counter + 1;
}
}
@SubscribeEvent
public void onLogout(PlayerEvent.PlayerLoggedOutEvent e)
{
ModuleAuth.hasSession.remove(e.player.getPersistentID());
PlayerPassData.removeFromCache(e.player.getPersistentID());
counter = counter - 1;
}
}
| src/main/java/com/forgeessentials/auth/AuthEventHandler.java | package com.forgeessentials.auth;
import java.util.UUID;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraftforge.event.CommandEvent;
import net.minecraftforge.event.ServerChatEvent;
import net.minecraftforge.event.entity.item.ItemTossEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import net.minecraftforge.event.entity.minecart.MinecartInteractEvent;
import net.minecraftforge.event.entity.player.AttackEntityEvent;
import net.minecraftforge.event.entity.player.EntityInteractEvent;
import net.minecraftforge.event.entity.player.EntityItemPickupEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.permissions.PermissionsManager;
import com.forgeessentials.core.ForgeEssentials;
import com.forgeessentials.util.OutputHandler;
import com.forgeessentials.util.events.PlayerMoveEvent;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.common.eventhandler.EventPriority;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.PlayerEvent;
public class AuthEventHandler
{
public static String banned;
public static String notvip;
public static String notwhitelisted;
public static boolean whitelist;
public static int vipslots;
public static int offset;
public int counter;
public int maxcounter;
public AuthEventHandler()
{
ForgeEssentials.log.info("FEauth initialized. Enabled: " + ModuleAuth.isEnabled());
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerMove(PlayerMoveEvent event)
{
UUID username = event.entityPlayer.getPersistentID();
if (event.before.getX() == event.after.getX() && event.before.getZ() == event.after.getZ())
{
return;
}
if (ModuleAuth.canMoveWithoutLogin)
{
return;
}
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerChat(ServerChatEvent event)
{
UUID username = event.player.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerCommand(CommandEvent event)
{
if (!(event.sender instanceof EntityPlayer))
{
return;
}
EntityPlayer player = (EntityPlayer) event.sender;
if (!ModuleAuth.hasSession.contains(player.getPersistentID()) && !(event.command instanceof CommandAuth))
{
event.setCanceled(true);
OutputHandler.chatError(player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(PlayerInteractEvent event)
{
UUID username = event.entityPlayer.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(EntityInteractEvent event)
{
UUID username = event.entityPlayer.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerInteract(MinecartInteractEvent event)
{
UUID username = event.player.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerTossItem(ItemTossEvent event)
{
UUID username = event.player.getPersistentID();
boolean cancel = false;
if (!ModuleAuth.hasSession.contains(username))
{
cancel = true;
OutputHandler.chatError(event.player, "Login required. Try /auth help.");
}
if (cancel)
{
// add the item back to the inventory
ItemStack stack = event.entityItem.getEntityItem();
event.player.inventory.addItemStackToInventory(stack);
event.setCanceled(cancel);
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerPickupItem(EntityItemPickupEvent event)
{
UUID username = event.entityPlayer.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerHurt(LivingHurtEvent event)
{
if (!(event.entityLiving instanceof EntityPlayer))
{
return;
}
EntityPlayer player = (EntityPlayer) event.entityLiving;
if (!ModuleAuth.hasSession.contains(player.getPersistentID()))
{
event.setCanceled(true);
OutputHandler.chatError(player, "Login required. Try /auth help.");
}
}
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onPlayerAttack(AttackEntityEvent event)
{
UUID username = event.entityPlayer.getPersistentID();
if (!ModuleAuth.hasSession.contains(username))
{
event.setCanceled(true);
OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
}
}
/*
* @SubscribeEvent(priority = EventPriority.HIGHEST) public void onPlayerOpenContainer(PlayerOpenContainerEvent
* event) { UUID username = event.entityPlayer.getPersistentID();
*
* if (!ModuleAuth.hasSession.contains(username)) { event.setResult(Result.DENY);
* OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help."); } }
*/
@SubscribeEvent(priority = EventPriority.HIGHEST)
public void onLogin(PlayerEvent.PlayerLoggedInEvent e)
{
if (!ModuleAuth.isEnabled())
{
return;
}
if (!PlayerPassData.isRegistered(e.player.getPersistentID()))
{
OutputHandler.chatError(e.player, "Registration required. Try /auth help.");
}
else
{
OutputHandler.chatError(e.player, "Login required. Try /auth help.");
}
maxcounter = FMLCommonHandler.instance().getMinecraftServerInstance().getMaxPlayers() - vipslots - offset;
if (whitelist)
{
if (!PermissionsManager.checkPermission(e.player, "fe.auth.isWhiteListed"))
{
((EntityPlayerMP) e.player).playerNetServerHandler.kickPlayerFromServer(notwhitelisted);
}
}
if (PermissionsManager.checkPermission(e.player, "fe.auth.isVIP"))
{
return;
}
else if (counter == maxcounter)
{
((EntityPlayerMP) e.player).playerNetServerHandler.kickPlayerFromServer(notvip);
}
else
{
counter = counter + 1;
}
}
@SubscribeEvent
public void onLogout(PlayerEvent.PlayerLoggedOutEvent e)
{
ModuleAuth.hasSession.remove(e.player.getPersistentID());
PlayerPassData.removeFromCache(e.player.getPersistentID());
counter = counter - 1;
}
}
| Fixed some checks in AuthModule. Should fix #1586
| src/main/java/com/forgeessentials/auth/AuthEventHandler.java | Fixed some checks in AuthModule. Should fix #1586 | <ide><path>rc/main/java/com/forgeessentials/auth/AuthEventHandler.java
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerMove(PlayerMoveEvent event)
<ide> {
<del> UUID username = event.entityPlayer.getPersistentID();
<del>
<add> if (!(event.entityPlayer instanceof EntityPlayerMP))
<add> return;
<add>
<ide> if (event.before.getX() == event.after.getX() && event.before.getZ() == event.after.getZ())
<ide> {
<ide> return;
<ide> }
<del>
<ide> if (ModuleAuth.canMoveWithoutLogin)
<ide> {
<ide> return;
<ide> }
<del>
<add> if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
<add> {
<add> event.setCanceled(true);
<add> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
<add> }
<add> }
<add>
<add> @SubscribeEvent(priority = EventPriority.HIGHEST)
<add> public void onPlayerChat(ServerChatEvent event)
<add> {
<add> if (!(event.player instanceof EntityPlayerMP))
<add> return;
<add>
<add> UUID username = event.player.getPersistentID();
<ide> if (!ModuleAuth.hasSession.contains(username))
<ide> {
<ide> event.setCanceled(true);
<del> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
<del> }
<del> }
<del>
<del> @SubscribeEvent(priority = EventPriority.HIGHEST)
<del> public void onPlayerChat(ServerChatEvent event)
<del> {
<del> UUID username = event.player.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<del> {
<del> event.setCanceled(true);
<ide> OutputHandler.chatError(event.player, "Login required. Try /auth help.");
<ide> }
<ide> }
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerCommand(CommandEvent event)
<ide> {
<add> if (!(event.sender instanceof EntityPlayerMP))
<add> return;
<add>
<ide> if (!(event.sender instanceof EntityPlayer))
<ide> {
<ide> return;
<ide> }
<del>
<ide> EntityPlayer player = (EntityPlayer) event.sender;
<del>
<ide> if (!ModuleAuth.hasSession.contains(player.getPersistentID()) && !(event.command instanceof CommandAuth))
<ide> {
<ide> event.setCanceled(true);
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerInteract(PlayerInteractEvent event)
<ide> {
<del> UUID username = event.entityPlayer.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!(event.entityPlayer instanceof EntityPlayerMP))
<add> return;
<add>
<add> if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerInteract(EntityInteractEvent event)
<ide> {
<del> UUID username = event.entityPlayer.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!(event.entityPlayer instanceof EntityPlayerMP))
<add> return;
<add>
<add> if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerInteract(MinecartInteractEvent event)
<ide> {
<del> UUID username = event.player.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!(event.player instanceof EntityPlayerMP))
<add> return;
<add>
<add> if (!ModuleAuth.hasSession.contains(event.player.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> OutputHandler.chatError(event.player, "Login required. Try /auth help.");
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerTossItem(ItemTossEvent event)
<ide> {
<del> UUID username = event.player.getPersistentID();
<add> if (!(event.player instanceof EntityPlayerMP))
<add> return;
<ide>
<ide> boolean cancel = false;
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!ModuleAuth.hasSession.contains(event.player.getPersistentID()))
<ide> {
<ide> cancel = true;
<del>
<ide> OutputHandler.chatError(event.player, "Login required. Try /auth help.");
<ide> }
<ide>
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerPickupItem(EntityItemPickupEvent event)
<ide> {
<del> UUID username = event.entityPlayer.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!(event.entityPlayer instanceof EntityPlayerMP))
<add> return;
<add>
<add> if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help.");
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerHurt(LivingHurtEvent event)
<ide> {
<del> if (!(event.entityLiving instanceof EntityPlayer))
<del> {
<del> return;
<del> }
<del>
<del> EntityPlayer player = (EntityPlayer) event.entityLiving;
<del>
<add> if (!(event.entityLiving instanceof EntityPlayerMP))
<add> return;
<add>
<add> EntityPlayerMP player = (EntityPlayerMP) event.entityLiving;
<ide> if (!ModuleAuth.hasSession.contains(player.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> @SubscribeEvent(priority = EventPriority.HIGHEST)
<ide> public void onPlayerAttack(AttackEntityEvent event)
<ide> {
<del> UUID username = event.entityPlayer.getPersistentID();
<del>
<del> if (!ModuleAuth.hasSession.contains(username))
<add> if (!(event.entityPlayer instanceof EntityPlayerMP))
<add> return;
<add>
<add> if (!ModuleAuth.hasSession.contains(event.entityPlayer.getPersistentID()))
<ide> {
<ide> event.setCanceled(true);
<ide> OutputHandler.chatError(event.entityPlayer, "Login required. Try /auth help."); |
|
Java | apache-2.0 | 4b06670d9ce1b2fcc1db15596bc8ae80dfd6aa52 | 0 | googleapis/java-bigtable-hbase,sduskis/cloud-bigtable-client,googleapis/java-bigtable-hbase,googleapis/java-bigtable-hbase,sduskis/cloud-bigtable-client | /*
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.beam;
import java.io.Serializable;
import java.util.Map;
import java.util.Objects;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.cloud.bigtable.hbase.util.ByteStringer;
import org.apache.beam.sdk.io.range.ByteKey;
import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.hadoop.hbase.client.Scan;
import com.google.bigtable.repackaged.com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowRange;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowSet;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableInstanceName;
import com.google.bigtable.repackaged.com.google.protobuf.ByteString;
import com.google.cloud.bigtable.hbase.adapters.Adapters;
import com.google.cloud.bigtable.hbase.adapters.read.DefaultReadHooks;
import com.google.cloud.bigtable.hbase.adapters.read.ReadHooks;
/**
* This class defines configuration that a Cloud Bigtable client needs to connect to a user's Cloud
* Bigtable instance; a table to connect to in the instance; and a filter on the table in the form of
* a {@link Scan}.
*/
public class CloudBigtableScanConfiguration extends CloudBigtableTableConfiguration {
private static final long serialVersionUID = 2435897354284600685L;
/**
* Converts a {@link CloudBigtableTableConfiguration} object to a
* {@link CloudBigtableScanConfiguration} that will perform the specified {@link Scan} on the
* table.
* @param config The {@link CloudBigtableTableConfiguration} object.
* @param scan The {@link Scan} to add to the configuration.
* @return The new {@link CloudBigtableScanConfiguration}.
*/
public static CloudBigtableScanConfiguration fromConfig(CloudBigtableTableConfiguration config,
Scan scan) {
CloudBigtableScanConfiguration.Builder builder = new CloudBigtableScanConfiguration.Builder();
config.copyConfig(builder);
return builder.withScan(scan).build();
}
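  // A minimal usage sketch; the project, instance and table names below are illustrative
  // placeholders, not values taken from this codebase:
  //
  //   CloudBigtableScanConfiguration config =
  //       new CloudBigtableScanConfiguration.Builder()
  //           .withProjectId("example-project")
  //           .withInstanceId("example-instance")
  //           .withTableId("example-table")
  //           .withScan(new Scan())
  //           .build();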
/**
* Builds a {@link CloudBigtableScanConfiguration}.
*/
public static class Builder extends CloudBigtableTableConfiguration.Builder {
private ValueProvider<ReadRowsRequest> request;
public Builder() {
}
/**
* Specifies the {@link Scan} that will be used to filter the table.
*
* @param scan The {@link Scan} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
public Builder withScan(Scan scan) {
Preconditions.checkArgument(scan != null, "Scan cannot be null");
ReadHooks readHooks = new DefaultReadHooks();
ReadRowsRequest.Builder builder = Adapters.SCAN_ADAPTER.adapt(scan, readHooks);
withRequest(readHooks.applyPreSendHook(builder.build()));
return this;
}
/**
* Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
public Builder withRequest(ReadRowsRequest request) {
return withRequest(StaticValueProvider.of(request));
}
/**
* Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
Builder withRequest(ValueProvider<ReadRowsRequest> request) {
this.request = request;
return this;
}
/**
* Internal API that allows a Source to configure the request with a new start/stop row range.
* @param startKey The first key, inclusive.
* @param stopKey The last key, exclusive.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
Builder withKeys(byte[] startKey, byte[] stopKey) {
Preconditions.checkNotNull(request, "Request cannot be empty.");
Preconditions.checkState(request.isAccessible(), "Request must be accessible.");
final ByteString start = ByteStringer.wrap(startKey);
final ByteString stop = ByteStringer.wrap(stopKey);
return withRequest(request
.get()
.toBuilder()
.setRows(
RowSet.newBuilder()
.addRowRanges(
RowRange.newBuilder().setStartKeyClosed(start).setEndKeyOpen(stop)))
.build());
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withProjectId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
public Builder withProjectId(String projectId) {
super.withProjectId(projectId);
return this;
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withProjectId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
Builder withProjectId(ValueProvider<String> projectId) {
super.withProjectId(projectId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withInstanceId(String instanceId) {
super.withInstanceId(instanceId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withInstanceId(ValueProvider<String> instanceId) {
super.withInstanceId(instanceId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withAppProfileId(String appProfileId) {
super.withAppProfileId(appProfileId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withAppProfileId(ValueProvider<String> appProfileId) {
super.withAppProfileId(appProfileId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withConfiguration(String key, String value) {
super.withConfiguration(key, value);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withConfiguration(String key, ValueProvider<String> value) {
super.withConfiguration(key, value);
return this;
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withTableId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
public Builder withTableId(String tableId) {
super.withTableId(tableId);
return this;
}
/**
* {@inheritDoc} Overrides {@link CloudBigtableTableConfiguration.Builder#withTableId(String)}
* so that it returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
Builder withTableId(ValueProvider<String> tableId) {
super.withTableId(tableId);
return this;
}
/**
* Builds the {@link CloudBigtableScanConfiguration}.
* @return The new {@link CloudBigtableScanConfiguration}.
*/
@Override
public CloudBigtableScanConfiguration build() {
if (request == null) {
withScan(new Scan());
}
return new CloudBigtableScanConfiguration(projectId, instanceId, tableId,
request, additionalConfiguration);
}
}
private final ValueProvider<ReadRowsRequest> request;
/**
* Provides an updated request by setting the table name in the existing request if the table name
* wasn't set.
*/
private static class RequestWithTableNameValueProvider
implements ValueProvider<ReadRowsRequest>, Serializable {
private final ValueProvider<String> projectId;
private final ValueProvider<String> instanceId;
private final ValueProvider<String> tableId;
private final ValueProvider<ReadRowsRequest> request;
private ReadRowsRequest cachedRequest;
RequestWithTableNameValueProvider(
ValueProvider<String> projectId,
ValueProvider<String> instanceId,
ValueProvider<String> tableId,
ValueProvider<ReadRowsRequest> request) {
this.projectId = projectId;
this.instanceId = instanceId;
this.tableId = tableId;
this.request = request;
}
@Override
public ReadRowsRequest get() {
if (cachedRequest == null) {
if (request.get().getTableName().isEmpty()) {
BigtableInstanceName bigtableInstanceName =
new BigtableInstanceName(projectId.get(), instanceId.get());
String fullTableName = bigtableInstanceName.toTableNameStr(tableId.get());
cachedRequest = request.get().toBuilder().setTableName(fullTableName).build();
} else {
cachedRequest = request.get();
}
}
return cachedRequest;
}
@Override
public boolean isAccessible() {
return projectId.isAccessible()
&& instanceId.isAccessible()
&& tableId.isAccessible()
&& request.isAccessible();
}
@Override
public String toString() {
if (isAccessible()) {
return String.valueOf(get());
}
return VALUE_UNAVAILABLE;
}
}
/**
* Creates a {@link CloudBigtableScanConfiguration} using the specified project ID, instance ID,
* table ID, {@link Scan} and additional connection configuration.
* @param projectId The project ID for the instance.
* @param instanceId The instance ID.
* @param tableId The table to connect to in the instance.
* @param request The {@link ReadRowsRequest} that will be used to filter the table.
* @param additionalConfiguration A {@link Map} with additional connection configuration.
*/
protected CloudBigtableScanConfiguration(
ValueProvider<String> projectId,
ValueProvider<String> instanceId,
ValueProvider<String> tableId,
ValueProvider<ReadRowsRequest> request,
Map<String, ValueProvider<String>> additionalConfiguration) {
super(projectId, instanceId, tableId, additionalConfiguration);
this.request = new RequestWithTableNameValueProvider(projectId, instanceId, tableId, request);
}
/**
   * Gets the {@link ReadRowsRequest} used to filter the table.
   * @return The {@link ReadRowsRequest}.
*/
public ReadRowsRequest getRequest() {
return request.get();
}
/**
* @return The start row for this configuration.
*/
public byte[] getStartRow() {
return getStartRowByteString().toByteArray();
}
/**
* @return The stop row for this configuration.
*/
public byte[] getStopRow() {
return getStopRowByteString().toByteArray();
}
/**
* @return The start row for this configuration.
*/
byte[] getZeroCopyStartRow() {
return ByteStringer.extract(getStartRowByteString());
}
/**
* @return The stop row for this configuration.
*/
byte[] getZeroCopyStopRow() {
return ByteStringer.extract(getStopRowByteString());
}
ByteString getStartRowByteString() {
return getRowRange().getStartKeyClosed();
}
ByteString getStopRowByteString() {
return getRowRange().getEndKeyOpen();
}
RowRange getRowRange() {
RowSet rows = getRequest().getRows();
return rows.getRowRanges(0);
}
@Override
public boolean equals(Object obj) {
return super.equals(obj)
&& Objects.equals(getRequest(), ((CloudBigtableScanConfiguration) obj).getRequest());
}
@Override
public Builder toBuilder() {
Builder builder = new Builder();
copyConfig(builder);
return builder;
}
public void copyConfig(Builder builder) {
super.copyConfig(builder);
builder.withRequest(getRequest());
}
/**
* Creates a {@link ByteKeyRange} representing the start and stop keys for this instance.
* @return A {@link ByteKeyRange}.
*/
public ByteKeyRange toByteKeyRange() {
return ByteKeyRange.of(ByteKey.copyFrom(getZeroCopyStartRow()),
ByteKey.copyFrom(getZeroCopyStopRow()));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
builder.add(
DisplayData.item("readRowsRequest", getDisplayValue(request)).withLabel("ReadRowsRequest"));
}
}
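// Usage sketch (illustrative only; the project, instance and table identifiers below are
// placeholders, not values from this repository). Based on the Builder methods above, a scan
// configuration is typically assembled along these lines:
//
//   CloudBigtableScanConfiguration config =
//       new CloudBigtableScanConfiguration.Builder()
//           .withProjectId("example-project")
//           .withInstanceId("example-instance")
//           .withTableId("example-table")
//           .withScan(new Scan())   // or withRequest(...) to supply a pre-built ReadRowsRequest
//           .build();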
| bigtable-dataflow-parent/bigtable-hbase-beam/src/main/java/com/google/cloud/bigtable/beam/CloudBigtableScanConfiguration.java | /*
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.beam;
import java.io.Serializable;
import java.util.Map;
import java.util.Objects;
import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.cloud.bigtable.hbase.util.ByteStringer;
import org.apache.beam.sdk.io.range.ByteKey;
import org.apache.beam.sdk.io.range.ByteKeyRange;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.hadoop.hbase.client.Scan;
import com.google.bigtable.repackaged.com.google.bigtable.v2.ReadRowsRequest;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowRange;
import com.google.bigtable.repackaged.com.google.bigtable.v2.RowSet;
import com.google.bigtable.repackaged.com.google.cloud.bigtable.grpc.BigtableInstanceName;
import com.google.bigtable.repackaged.com.google.protobuf.ByteString;
import com.google.cloud.bigtable.hbase.adapters.Adapters;
import com.google.cloud.bigtable.hbase.adapters.read.DefaultReadHooks;
import com.google.cloud.bigtable.hbase.adapters.read.ReadHooks;
/**
* This class defines configuration that a Cloud Bigtable client needs to connect to a user's Cloud
* Bigtable instance; a table to connect to in the instance; and a filter on the table in the form of
* a {@link Scan}.
*/
public class CloudBigtableScanConfiguration extends CloudBigtableTableConfiguration {
private static final long serialVersionUID = 2435897354284600685L;
/**
* Converts a {@link CloudBigtableTableConfiguration} object to a
* {@link CloudBigtableScanConfiguration} that will perform the specified {@link Scan} on the
* table.
* @param config The {@link CloudBigtableTableConfiguration} object.
* @param scan The {@link Scan} to add to the configuration.
* @return The new {@link CloudBigtableScanConfiguration}.
*/
public static CloudBigtableScanConfiguration fromConfig(CloudBigtableTableConfiguration config,
Scan scan) {
CloudBigtableScanConfiguration.Builder builder = new CloudBigtableScanConfiguration.Builder();
config.copyConfig(builder);
return builder.withScan(scan).build();
}
/**
* Builds a {@link CloudBigtableScanConfiguration}.
*/
public static class Builder extends CloudBigtableTableConfiguration.Builder {
private ValueProvider<ReadRowsRequest> request;
public Builder() {
}
/**
* Specifies the {@link Scan} that will be used to filter the table.
*
* @param scan The {@link Scan} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
public Builder withScan(Scan scan) {
Preconditions.checkArgument(scan != null, "Scan cannot be null");
ReadHooks readHooks = new DefaultReadHooks();
ReadRowsRequest.Builder builder = Adapters.SCAN_ADAPTER.adapt(scan, readHooks);
withRequest(readHooks.applyPreSendHook(builder.build()));
return this;
}
/**
* Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
public Builder withRequest(ReadRowsRequest request) {
return withRequest(StaticValueProvider.of(request));
}
/**
* Specifies the {@link ReadRowsRequest} that will be used to filter the table.
* @param request The {@link ReadRowsRequest} to add to the configuration.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
Builder withRequest(ValueProvider<ReadRowsRequest> request) {
this.request = request;
return this;
}
/**
* Provides an updated request by replacing the row set and adding a row range with start and
* end keys in the existing request.
*/
private static class RequestWithKeysValueProvider
implements ValueProvider<ReadRowsRequest>, Serializable {
private final ByteString start;
private final ByteString stop;
private final ValueProvider<ReadRowsRequest> request;
private ReadRowsRequest cachedRequest;
RequestWithKeysValueProvider(
ByteString start, ByteString stop, ValueProvider<ReadRowsRequest> request) {
this.start = start;
this.stop = stop;
this.request = request;
}
@Override
public ReadRowsRequest get() {
if (cachedRequest == null) {
cachedRequest =
request
.get()
.toBuilder()
.setRows(
RowSet.newBuilder()
.addRowRanges(
RowRange.newBuilder().setStartKeyClosed(start).setEndKeyOpen(stop)))
.build();
}
return cachedRequest;
}
@Override
public boolean isAccessible() {
return request.isAccessible();
}
@Override
public String toString() {
if (isAccessible()) {
return String.valueOf(get());
}
return VALUE_UNAVAILABLE;
}
}
/**
* Internal API that allows a Source to configure the request with a new start/stop row range.
* @param startKey The first key, inclusive.
* @param stopKey The last key, exclusive.
* @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
*/
Builder withKeys(byte[] startKey, byte[] stopKey) {
final ByteString start = ByteStringer.wrap(startKey);
final ByteString stop = ByteStringer.wrap(stopKey);
ValueProvider<ReadRowsRequest> request =
this.request == null
? StaticValueProvider.of(ReadRowsRequest.getDefaultInstance())
: this.request;
return withRequest(new RequestWithKeysValueProvider(start, stop, request));
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withProjectId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
public Builder withProjectId(String projectId) {
super.withProjectId(projectId);
return this;
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withProjectId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
Builder withProjectId(ValueProvider<String> projectId) {
super.withProjectId(projectId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withInstanceId(String instanceId) {
super.withInstanceId(instanceId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withInstanceId(ValueProvider<String> instanceId) {
super.withInstanceId(instanceId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withAppProfileId(String appProfileId) {
super.withAppProfileId(appProfileId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withAppProfileId(ValueProvider<String> appProfileId) {
super.withAppProfileId(appProfileId);
return this;
}
/**
* {@inheritDoc}
*/
@Override
public Builder withConfiguration(String key, String value) {
super.withConfiguration(key, value);
return this;
}
/**
* {@inheritDoc}
*/
@Override
Builder withConfiguration(String key, ValueProvider<String> value) {
super.withConfiguration(key, value);
return this;
}
/**
* {@inheritDoc}
*
* <p>Overrides {@link CloudBigtableTableConfiguration.Builder#withTableId(String)} so that it
* returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
public Builder withTableId(String tableId) {
super.withTableId(tableId);
return this;
}
/**
* {@inheritDoc} Overrides {@link CloudBigtableTableConfiguration.Builder#withTableId(String)}
* so that it returns {@link CloudBigtableScanConfiguration.Builder}.
*/
@Override
Builder withTableId(ValueProvider<String> tableId) {
super.withTableId(tableId);
return this;
}
/**
* Builds the {@link CloudBigtableScanConfiguration}.
* @return The new {@link CloudBigtableScanConfiguration}.
*/
@Override
public CloudBigtableScanConfiguration build() {
if (request == null) {
withScan(new Scan());
}
return new CloudBigtableScanConfiguration(projectId, instanceId, tableId,
request, additionalConfiguration);
}
}
private final ValueProvider<ReadRowsRequest> request;
/**
* Provides an updated request by setting the table name in the existing request if the table name
* wasn't set.
*/
private static class RequestWithTableNameValueProvider
implements ValueProvider<ReadRowsRequest>, Serializable {
private final ValueProvider<String> projectId;
private final ValueProvider<String> instanceId;
private final ValueProvider<String> tableId;
private final ValueProvider<ReadRowsRequest> request;
private ReadRowsRequest cachedRequest;
RequestWithTableNameValueProvider(
ValueProvider<String> projectId,
ValueProvider<String> instanceId,
ValueProvider<String> tableId,
ValueProvider<ReadRowsRequest> request) {
this.projectId = projectId;
this.instanceId = instanceId;
this.tableId = tableId;
this.request = request;
}
@Override
public ReadRowsRequest get() {
if (cachedRequest == null) {
if (request.get().getTableName().isEmpty()) {
BigtableInstanceName bigtableInstanceName =
new BigtableInstanceName(projectId.get(), instanceId.get());
String fullTableName = bigtableInstanceName.toTableNameStr(tableId.get());
cachedRequest = request.get().toBuilder().setTableName(fullTableName).build();
} else {
cachedRequest = request.get();
}
}
return cachedRequest;
}
@Override
public boolean isAccessible() {
return projectId.isAccessible()
&& instanceId.isAccessible()
&& tableId.isAccessible()
&& request.isAccessible();
}
@Override
public String toString() {
if (isAccessible()) {
return String.valueOf(get());
}
return VALUE_UNAVAILABLE;
}
}
/**
* Creates a {@link CloudBigtableScanConfiguration} using the specified project ID, instance ID,
* table ID, {@link Scan} and additional connection configuration.
* @param projectId The project ID for the instance.
* @param instanceId The instance ID.
* @param tableId The table to connect to in the instance.
* @param request The {@link ReadRowsRequest} that will be used to filter the table.
* @param additionalConfiguration A {@link Map} with additional connection configuration.
*/
protected CloudBigtableScanConfiguration(
ValueProvider<String> projectId,
ValueProvider<String> instanceId,
ValueProvider<String> tableId,
ValueProvider<ReadRowsRequest> request,
Map<String, ValueProvider<String>> additionalConfiguration) {
super(projectId, instanceId, tableId, additionalConfiguration);
this.request = new RequestWithTableNameValueProvider(projectId, instanceId, tableId, request);
}
  /**
   * Gets the {@link ReadRowsRequest} used to filter the table.
   * @return The {@link ReadRowsRequest}.
   */
public ReadRowsRequest getRequest() {
return request.get();
}
/**
* @return The start row for this configuration.
*/
public byte[] getStartRow() {
return getStartRowByteString().toByteArray();
}
/**
* @return The stop row for this configuration.
*/
public byte[] getStopRow() {
return getStopRowByteString().toByteArray();
}
/**
* @return The start row for this configuration.
*/
byte[] getZeroCopyStartRow() {
return ByteStringer.extract(getStartRowByteString());
}
/**
* @return The stop row for this configuration.
*/
byte[] getZeroCopyStopRow() {
return ByteStringer.extract(getStopRowByteString());
}
ByteString getStartRowByteString() {
return getRowRange().getStartKeyClosed();
}
ByteString getStopRowByteString() {
return getRowRange().getEndKeyOpen();
}
RowRange getRowRange() {
RowSet rows = getRequest().getRows();
return rows.getRowRanges(0);
}
@Override
public boolean equals(Object obj) {
return super.equals(obj)
&& Objects.equals(getRequest(), ((CloudBigtableScanConfiguration) obj).getRequest());
}
@Override
public Builder toBuilder() {
Builder builder = new Builder();
copyConfig(builder);
return builder;
}
public void copyConfig(Builder builder) {
super.copyConfig(builder);
builder.withRequest(getRequest());
}
/**
* Creates a {@link ByteKeyRange} representing the start and stop keys for this instance.
* @return A {@link ByteKeyRange}.
*/
public ByteKeyRange toByteKeyRange() {
return ByteKeyRange.of(ByteKey.copyFrom(getZeroCopyStartRow()),
ByteKey.copyFrom(getZeroCopyStopRow()));
}
@Override
public void populateDisplayData(DisplayData.Builder builder) {
super.populateDisplayData(builder);
builder.add(
DisplayData.item("readRowsRequest", getDisplayValue(request)).withLabel("ReadRowsRequest"));
}
}
| Removing RequestWithKeysValueProvider (#2051)
`CloudBigtableScanConfiguration.withKeys()` is only called after the request materializes. Adding some `Precondition`s and removing `RequestWithKeysValueProvider`, which only added complexity.
<ide> }
<ide>
<ide> /**
<del> * Provides an updated request by replacing the row set and adding a row range with start and
<del> * end keys in the existing request.
<del> */
<del> private static class RequestWithKeysValueProvider
<del> implements ValueProvider<ReadRowsRequest>, Serializable {
<del> private final ByteString start;
<del> private final ByteString stop;
<del> private final ValueProvider<ReadRowsRequest> request;
<del> private ReadRowsRequest cachedRequest;
<del>
<del> RequestWithKeysValueProvider(
<del> ByteString start, ByteString stop, ValueProvider<ReadRowsRequest> request) {
<del> this.start = start;
<del> this.stop = stop;
<del> this.request = request;
<del> }
<del>
<del> @Override
<del> public ReadRowsRequest get() {
<del> if (cachedRequest == null) {
<del> cachedRequest =
<del> request
<del> .get()
<del> .toBuilder()
<del> .setRows(
<del> RowSet.newBuilder()
<del> .addRowRanges(
<del> RowRange.newBuilder().setStartKeyClosed(start).setEndKeyOpen(stop)))
<del> .build();
<del> }
<del> return cachedRequest;
<del> }
<del>
<del> @Override
<del> public boolean isAccessible() {
<del> return request.isAccessible();
<del> }
<del>
<del> @Override
<del> public String toString() {
<del> if (isAccessible()) {
<del> return String.valueOf(get());
<del> }
<del> return VALUE_UNAVAILABLE;
<del> }
<del> }
<del>
<del> /**
<ide> * Internal API that allows a Source to configure the request with a new start/stop row range.
<ide> * @param startKey The first key, inclusive.
<ide> * @param stopKey The last key, exclusive.
<ide> * @return The {@link CloudBigtableScanConfiguration.Builder} for chaining convenience.
<ide> */
<ide> Builder withKeys(byte[] startKey, byte[] stopKey) {
<add> Preconditions.checkNotNull(request, "Request cannot be empty.");
<add> Preconditions.checkState(request.isAccessible(), "Request must be accessible.");
<ide> final ByteString start = ByteStringer.wrap(startKey);
<ide> final ByteString stop = ByteStringer.wrap(stopKey);
<del> ValueProvider<ReadRowsRequest> request =
<del> this.request == null
<del> ? StaticValueProvider.of(ReadRowsRequest.getDefaultInstance())
<del> : this.request;
<del> return withRequest(new RequestWithKeysValueProvider(start, stop, request));
<add> return withRequest(request
<add> .get()
<add> .toBuilder()
<add> .setRows(
<add> RowSet.newBuilder()
<add> .addRowRanges(
<add> RowRange.newBuilder().setStartKeyClosed(start).setEndKeyOpen(stop)))
<add> .build());
<ide> }
<ide>
<ide> /** |
|
Java | mit | 2dcdf57b3e9c4ed208590fd9e61c35224d96f648 | 0 | kbase/njs_wrapper,kbase/njs_wrapper,kbase/njs_wrapper,kbase/njs_wrapper,kbase/njs_wrapper,kbase/njs_wrapper | package us.kbase.common.utils;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import us.kbase.auth.AuthToken;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.axis.*;
import condor.*;
import java.net.URL;
import java.net.MalformedURLException;
import java.rmi.RemoteException;
import javax.xml.rpc.ServiceException;
public class CondorUtils
{
private static ClassAdStructAttr[] buildJobAd(String owner, String jobFileLocation, int clusterId, int jobId)
{
String jobOutputFileLocation = jobFileLocation + ".job.out";
String jobLogFileLocation = jobFileLocation + ".job.log";
String stdOutLocation = jobFileLocation + ".stdout";
String stdErrLocation = jobFileLocation + ".stderr";
String dagmanLockFile = jobFileLocation + ".lock";
String workingDirectory = jobFileLocation.substring(0, jobFileLocation.lastIndexOf("/"));
ClassAdStructAttr[] jobAd =
{
createStringAttribute("Owner", owner), // Need to insert kbase username@realm here
createStringAttribute("Iwd", workingDirectory), // Awe creates a working directory per job (uuid) we may need to generate one, or use the job id. Not sure if condor will create this directory. If not, we may need to handle working directory creation in the async runner script.
createIntAttribute("JobUniverse", 5), // Vanilla Universe
createStringAttribute("Cmd", "run_async_srv_method.sh"),
createIntAttribute("JobStatus", 1), // Idle
createStringAttribute("Env",
"_CONDOR_MAX_LOG=0;" +
"_CONDOR_LOG=" + jobOutputFileLocation), //leaving in for example setting env var - not needed for kbase
createIntAttribute("JobNotification", 0), // Never
createStringAttribute("UserLog", jobLogFileLocation),
createStringAttribute("RemoveKillSig", "SIGUSR1"),
createStringAttribute("Out", stdOutLocation),
createStringAttribute("Err", stdErrLocation),
createStringAttribute("ShouldTransferFiles", "NO"), // Using shared FS
// ERROR schedd log: Job id 62.0 has no Owner attribute. Removing.
// http://research.cs.wisc.edu/htcondor/manual/v7.6/4_1Condor_s_ClassAd.html#sec:classad-reference
// Owner has "Policy" semantics and configuration connotation
// Is used to map jobs ClassAd to machines ClassAd
// http://research.cs.wisc.edu/htcondor/manual/v7.6/3_5Policy_Configuration.html
createExpressionAttribute("Requirements", "TRUE"),
createExpressionAttribute("OnExitRemove",
"(ExitSignal =?= 11 || " +
" (ExitCode =!= UNDEFINED && " +
" ExitCode >=0 && ExitCode <= 2))"),
createStringAttribute("Arguments",
"-f -l . -Debug 3 "), // also leaving - we can modify for kbase arguments
createIntAttribute("ClusterId", clusterId),
createIntAttribute("ProcId", jobId)
};
return jobAd;
}
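    // Shape sketch (illustrative values): for owner "submitter" and jobFileLocation "/tmp/jobs/j1",
    // the ad built above would contain attributes such as
    //   Owner = "submitter", Iwd = "/tmp/jobs", JobUniverse = 5, Cmd = "run_async_srv_method.sh",
    //   UserLog = "/tmp/jobs/j1.job.log", Out = "/tmp/jobs/j1.stdout", Err = "/tmp/jobs/j1.stderr".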
private static ClassAdStructAttr createStringAttribute(String name, String value)
{
return createAttribute(name, value, ClassAdAttrType.value3);
}
private static ClassAdStructAttr createIntAttribute(String name, int value)
{
return createAttribute(name,
String.valueOf(value),
ClassAdAttrType.value1);
}
private static ClassAdStructAttr createExpressionAttribute(String name, String value)
{
return createAttribute(name, value, ClassAdAttrType.value4);
}
private static ClassAdStructAttr createAttribute(String name, String value, ClassAdAttrType type)
{
ClassAdStructAttr attribute = new ClassAdStructAttr();
attribute.setName(name);
attribute.setValue(value);
attribute.setType(type);
return attribute;
}
public static Map<String, Object> getJobDescr( /* String condorUrl */ String jobId ) throws IOException {
// XXX: Hardcoded path to the script to execute:
String[] cmdScript = new String[]{"/bin/bash", "/home/submitter/submit/njs_wrapper/scripts/condor_q.sh", jobId};
Map<String, Object> respObj = null;
String message = "";
Process p = Runtime.getRuntime().exec(cmdScript);
BufferedReader reader = new BufferedReader(new InputStreamReader( p.getInputStream() ));
        // Read the script's stdout line by line, echoing each line and accumulating it in message.
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
            message += line;
        }
// if (!aweServerUrl.endsWith("/")) aweServerUrl += "/";
/*
CloseableHttpClient httpClient = HttpClients.createDefault();
HttpGet httpReq = new HttpGet(aweServerUrl + "/job/" + aweJobId);
httpReq.addHeader("Authorization", "OAuth " + token.getToken());
*/
// return parseResponse( jobId );
return respObj;
}
// Parse job status
public static Map<String, Object> parseResponse( String jobId ) throws IOException, ClientProtocolException, JsonParseException, JsonMappingException {
// String postResponse = "" + EntityUtils.toString(response.getEntity());
Map<String, Object> respObj = null;
/*
try {
respObj = new ObjectMapper().readValue( response, Map.class);
} catch (Exception ex) {
String respHead = response.length() <= 1000 ? response :
( response.subSequence(0, 1000) + "..." );
throw new IllegalStateException("Error parsing JSON response from Condor " +
"(" + ex.getMessage() + "). Here is the response head text: \n" +
respHead, ex);
}
*/
// TODO: Query the status int out of response:
int status = 1;
// int status = response.getStatusLine().getStatusCode();
// XXX: Hardcoded path to the script to execute:
String[] cmdScript = new String[]{"/bin/bash", "/home/submitter/submit/njs_wrapper/scripts/condor_q_long.sh", jobId, "LastJobStatus"};
Process p = Runtime.getRuntime().exec( cmdScript );
BufferedReader reader = new BufferedReader(new InputStreamReader( p.getInputStream() ));
String line = reader.readLine();
System.out.println( line );
// TODO: parse the substring after '=' from line
// Gets NPE for job id of bogusJobId
status = Integer.valueOf( line.substring( (line.indexOf("=") + 2), line.length() ) );
// XXX: NPE next line
// Integer jsonStatus = (Integer)respObj.get("status");
// Object errObj = respObj.get("error");
/*
if (status != 200 || jsonStatus != null && jsonStatus != 200 || errObj != null) {
if (jsonStatus == null) jsonStatus = status;
String error = null;
if (errObj != null) {
if (errObj instanceof List) {
List<Object> errList = (List<Object>)errObj;
if (errList.size() == 1 && errList.get(0) instanceof String) error = (String)errList.get(0);
}
if (error == null) error = String.valueOf(errObj);
}
String reason = response.getStatusLine().getReasonPhrase();
String fullMessage = "Condor error code " + jsonStatus + ": " + (error == null ? reason : error);
}
*/
return respObj;
}
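    // Expected input sketch (assumption about the helper script's output): condor_q_long.sh is
    // invoked above with the attribute name "LastJobStatus" and is expected to print a single
    // classad line of the form
    //   LastJobStatus = 2
    // from which the integer after "= " is extracted (HTCondor status codes: 1=Idle, 2=Running,
    // 3=Removed, 4=Completed, 5=Held).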
public static int submitToCondor( String condorUrl, String owner, String jobFileLocation,
// String jobName, String args, String scriptName, AuthToken auth,
String clientGroups) throws MalformedURLException, RemoteException, ServiceException {
URL scheddLocation = new URL( condorUrl );
// Get a handle on a schedd we can make SOAP call on.
CondorScheddLocator scheddLocator = new CondorScheddLocator();
CondorScheddPortType schedd = scheddLocator.getcondorSchedd(scheddLocation);
// Begin a transaction, allow for 60 seconds between calls
TransactionAndStatus transactionAndStatus = schedd.beginTransaction(60);
Transaction transaction = transactionAndStatus.getTransaction();
// Get a new cluster for the job.
IntAndStatus clusterIdAndStatus = schedd.newCluster(transaction);
int clusterId = clusterIdAndStatus.getInteger();
// Get a new Job ID (aka a ProcId) for the Job.
IntAndStatus jobIdAndStatus = schedd.newJob(transaction, clusterId);
int jobId = jobIdAndStatus.getInteger();
// Build the Job's ClassAd.
ClassAdStructAttr[] jobAd = buildJobAd(owner, jobFileLocation, clusterId, jobId);
// Submit the Job's ClassAd.
schedd.submit(transaction, clusterId, jobId, jobAd);
// Commit the transaction.
schedd.commitTransaction(transaction);
// Ask the Schedd to kick off the Job immediately.
schedd.requestReschedule();
return jobId;
}
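    // Usage sketch (illustrative only; the schedd URL, owner and file prefix are placeholder values):
    //
    //   int jobId = CondorUtils.submitToCondor(
    //       "http://condor-schedd.example.org:8080", // schedd SOAP endpoint
    //       "submitter",                             // value for the job's Owner attribute
    //       "/tmp/condor-jobs/job-0001",             // prefix for the .job.log/.stdout/.stderr files
    //       "kbase");                                // clientGroups (not yet used by this method)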
public static void main(String[] arguments) throws MalformedURLException, RemoteException, ServiceException
{
String jobId;
if( ! ( arguments.length > 0 ) ) {
jobId = "BogusJobId";
} else {
jobId = arguments[ 0 ];
// URL scheddLocation = new URL( arguments[ 0 ] );
}
// Call parseResponse
// Usage: CondorUtils $1
try{
Map<String, Object> respObj = parseResponse( jobId );
} catch( IOException ex ) {
ex.printStackTrace();
String message = "CondorUtils: Error calling parseResponse from main... " + ex.getMessage();
System.err.println(message);
}
// Call getJobDescr
// Usage: CondorUtils $1
/*
try{
Map<String, Object> respObj = getJobDescr( jobId );
} catch( IOException ex ) {
ex.printStackTrace();
String message = "CondorUtils: Error calling getJobDescr from main... " + ex.getMessage();
System.err.println(message);
// if (log != null) log.logErr(message);
}
*/
}
// Test main for submitting via Spinning API
/*
public static void main(String[] arguments)
throws MalformedURLException, RemoteException, ServiceException
{
URL scheddLocation = new URL(arguments[0]);
String owner = arguments[1];
String jobFileLocation = arguments[2];
// Get a handle on a schedd we can make SOAP call on.
CondorScheddLocator scheddLocator = new CondorScheddLocator();
CondorScheddPortType schedd = scheddLocator.getcondorSchedd(scheddLocation);
// Begin a transaction, allow for 60 seconds between calls
TransactionAndStatus transactionAndStatus = schedd.beginTransaction(60);
Transaction transaction = transactionAndStatus.getTransaction();
// Get a new cluster for the job.
IntAndStatus clusterIdAndStatus = schedd.newCluster(transaction);
int clusterId = clusterIdAndStatus.getInteger();
// Get a new Job ID (aka a ProcId) for the Job.
IntAndStatus jobIdAndStatus = schedd.newJob(transaction, clusterId);
int jobId = jobIdAndStatus.getInteger();
// Build the Job's ClassAd.
ClassAdStructAttr[] jobAd = buildJobAd(owner, jobFileLocation, clusterId, jobId);
// Submit the Job's ClassAd.
schedd.submit(transaction, clusterId, jobId, jobAd);
// Debug: Dump JobAd
System.out.println( "CondorUtils::Dump JobAd: " + jobAd.toString() );
for( int i = 0; i < jobAd.length; i++ ) {
System.out.println( " JobAd[ " + i + " ] name = " + jobAd[ i ].getName() + " JobAd[ " + i + " ] value = " + jobAd[ i ].getValue() );
}
// Commit the transaction.
schedd.commitTransaction(transaction);
// Ask the Schedd to kick off the Job immediately.
schedd.requestReschedule();
}
*/
} // class CondorUtils
| src/us/kbase/common/utils/CondorUtils.java | package us.kbase.common.utils;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import us.kbase.auth.AuthToken;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.axis.*;
import condor.*;
import java.net.URL;
import java.net.MalformedURLException;
import java.rmi.RemoteException;
import javax.xml.rpc.ServiceException;
public class CondorUtils
{
private static ClassAdStructAttr[] buildJobAd(String owner, String jobFileLocation, int clusterId, int jobId)
{
String jobOutputFileLocation = jobFileLocation + ".job.out";
String jobLogFileLocation = jobFileLocation + ".job.log";
String stdOutLocation = jobFileLocation + ".stdout";
String stdErrLocation = jobFileLocation + ".stderr";
String dagmanLockFile = jobFileLocation + ".lock";
String workingDirectory = jobFileLocation.substring(0, jobFileLocation.lastIndexOf("/"));
ClassAdStructAttr[] jobAd =
{
createStringAttribute("Owner", owner), // Need to insert kbase username@realm here
createStringAttribute("Iwd", workingDirectory), // Awe creates a working directory per job (uuid) we may need to generate one, or use the job id. Not sure if condor will create this directory. If not, we may need to handle working directory creation in the async runner script.
createIntAttribute("JobUniverse", 5), // Vanilla Universe
createStringAttribute("Cmd", "run_async_srv_method.sh"),
createIntAttribute("JobStatus", 1), // Idle
createStringAttribute("Env",
"_CONDOR_MAX_LOG=0;" +
"_CONDOR_LOG=" + jobOutputFileLocation), //leaving in for example setting env var - not needed for kbase
createIntAttribute("JobNotification", 0), // Never
createStringAttribute("UserLog", jobLogFileLocation),
createStringAttribute("RemoveKillSig", "SIGUSR1"),
createStringAttribute("Out", stdOutLocation),
createStringAttribute("Err", stdErrLocation),
createStringAttribute("ShouldTransferFiles", "NO"), // Using shared FS
// ERROR schedd log: Job id 62.0 has no Owner attribute. Removing.
// http://research.cs.wisc.edu/htcondor/manual/v7.6/4_1Condor_s_ClassAd.html#sec:classad-reference
// Owner has "Policy" semantics and configuration connotation
// Is used to map jobs ClassAd to machines ClassAd
// http://research.cs.wisc.edu/htcondor/manual/v7.6/3_5Policy_Configuration.html
createExpressionAttribute("Requirements", "TRUE"),
createExpressionAttribute("OnExitRemove",
"(ExitSignal =?= 11 || " +
" (ExitCode =!= UNDEFINED && " +
" ExitCode >=0 && ExitCode <= 2))"),
createStringAttribute("Arguments",
"-f -l . -Debug 3 "), // also leaving - we can modify for kbase arguments
createIntAttribute("ClusterId", clusterId),
createIntAttribute("ProcId", jobId)
};
return jobAd;
}
private static ClassAdStructAttr createStringAttribute(String name, String value)
{
return createAttribute(name, value, ClassAdAttrType.value3);
}
private static ClassAdStructAttr createIntAttribute(String name, int value)
{
return createAttribute(name,
String.valueOf(value),
ClassAdAttrType.value1);
}
private static ClassAdStructAttr createExpressionAttribute(String name, String value)
{
return createAttribute(name, value, ClassAdAttrType.value4);
}
private static ClassAdStructAttr createAttribute(String name, String value, ClassAdAttrType type)
{
ClassAdStructAttr attribute = new ClassAdStructAttr();
attribute.setName(name);
attribute.setValue(value);
attribute.setType(type);
return attribute;
}
public static Map<String, Object> getJobDescr( /* String condorUrl */ String jobId ) throws IOException {
// XXX: Hardcoded path to the script to execute:
String[] cmdScript = new String[]{"/bin/bash", "/home/submitter/submit/njs_wrapper/scripts/condor_q.sh", jobId};
Map<String, Object> respObj = null;
String message = "";
Process p = Runtime.getRuntime().exec(cmdScript);
BufferedReader reader = new BufferedReader(new InputStreamReader( p.getInputStream() ));
        // Read the script's stdout line by line, echoing each line and accumulating it in message.
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
            message += line;
        }
// if (!aweServerUrl.endsWith("/")) aweServerUrl += "/";
/*
CloseableHttpClient httpClient = HttpClients.createDefault();
HttpGet httpReq = new HttpGet(aweServerUrl + "/job/" + aweJobId);
httpReq.addHeader("Authorization", "OAuth " + token.getToken());
*/
// return parseResponse( jobId );
return respObj;
}
// Parse job status
public static Map<String, Object> parseResponse( String jobId ) throws IOException, ClientProtocolException, JsonParseException, JsonMappingException {
// String postResponse = "" + EntityUtils.toString(response.getEntity());
Map<String, Object> respObj = null;
/*
try {
respObj = new ObjectMapper().readValue( response, Map.class);
} catch (Exception ex) {
String respHead = response.length() <= 1000 ? response :
( response.subSequence(0, 1000) + "..." );
throw new IllegalStateException("Error parsing JSON response from Condor " +
"(" + ex.getMessage() + "). Here is the response head text: \n" +
respHead, ex);
}
*/
// TODO: Query the status int out of response:
int status = 1;
// int status = response.getStatusLine().getStatusCode();
// XXX: Hardcoded path to the script to execute:
String[] cmdScript = new String[]{"/bin/bash", "/home/submitter/submit/njs_wrapper/scripts/condor_q_long.sh", jobId, "LastJobStatus"};
Process p = Runtime.getRuntime().exec( cmdScript );
BufferedReader reader = new BufferedReader(new InputStreamReader( p.getInputStream() ));
String line = reader.readLine();
System.out.println( line );
// TODO: parse the substring after '=' from line
// Gets NPE for job id of bogusJobId
status = Integer.valueOf( line.substring( (line.indexOf("=") + 1), line.length() ) );
// XXX: NPE next line
// Integer jsonStatus = (Integer)respObj.get("status");
// Object errObj = respObj.get("error");
/*
if (status != 200 || jsonStatus != null && jsonStatus != 200 || errObj != null) {
if (jsonStatus == null) jsonStatus = status;
String error = null;
if (errObj != null) {
if (errObj instanceof List) {
List<Object> errList = (List<Object>)errObj;
if (errList.size() == 1 && errList.get(0) instanceof String) error = (String)errList.get(0);
}
if (error == null) error = String.valueOf(errObj);
}
String reason = response.getStatusLine().getReasonPhrase();
String fullMessage = "Condor error code " + jsonStatus + ": " + (error == null ? reason : error);
}
*/
return respObj;
}
public static int submitToCondor( String condorUrl, String owner, String jobFileLocation,
// String jobName, String args, String scriptName, AuthToken auth,
String clientGroups) throws MalformedURLException, RemoteException, ServiceException {
URL scheddLocation = new URL( condorUrl );
// Get a handle on a schedd we can make SOAP call on.
CondorScheddLocator scheddLocator = new CondorScheddLocator();
CondorScheddPortType schedd = scheddLocator.getcondorSchedd(scheddLocation);
// Begin a transaction, allow for 60 seconds between calls
TransactionAndStatus transactionAndStatus = schedd.beginTransaction(60);
Transaction transaction = transactionAndStatus.getTransaction();
// Get a new cluster for the job.
IntAndStatus clusterIdAndStatus = schedd.newCluster(transaction);
int clusterId = clusterIdAndStatus.getInteger();
// Get a new Job ID (aka a ProcId) for the Job.
IntAndStatus jobIdAndStatus = schedd.newJob(transaction, clusterId);
int jobId = jobIdAndStatus.getInteger();
// Build the Job's ClassAd.
ClassAdStructAttr[] jobAd = buildJobAd(owner, jobFileLocation, clusterId, jobId);
// Submit the Job's ClassAd.
schedd.submit(transaction, clusterId, jobId, jobAd);
// Commit the transaction.
schedd.commitTransaction(transaction);
// Ask the Schedd to kick off the Job immediately.
schedd.requestReschedule();
return jobId;
}
public static void main(String[] arguments) throws MalformedURLException, RemoteException, ServiceException
{
String jobId;
if( ! ( arguments.length > 0 ) ) {
jobId = "BogusJobId";
} else {
jobId = arguments[ 0 ];
// URL scheddLocation = new URL( arguments[ 0 ] );
}
// Call parseResponse
// Usage: CondorUtils $1
try{
Map<String, Object> respObj = parseResponse( jobId );
} catch( IOException ex ) {
ex.printStackTrace();
String message = "CondorUtils: Error calling parseResponse from main... " + ex.getMessage();
System.err.println(message);
}
// Call getJobDescr
// Usage: CondorUtils $1
/*
try{
Map<String, Object> respObj = getJobDescr( jobId );
} catch( IOException ex ) {
ex.printStackTrace();
String message = "CondorUtils: Error calling getJobDescr from main... " + ex.getMessage();
System.err.println(message);
// if (log != null) log.logErr(message);
}
*/
}
// Test main for submitting via Spinning API
/*
public static void main(String[] arguments)
throws MalformedURLException, RemoteException, ServiceException
{
URL scheddLocation = new URL(arguments[0]);
String owner = arguments[1];
String jobFileLocation = arguments[2];
// Get a handle on a schedd we can make SOAP call on.
CondorScheddLocator scheddLocator = new CondorScheddLocator();
CondorScheddPortType schedd = scheddLocator.getcondorSchedd(scheddLocation);
// Begin a transaction, allow for 60 seconds between calls
TransactionAndStatus transactionAndStatus = schedd.beginTransaction(60);
Transaction transaction = transactionAndStatus.getTransaction();
// Get a new cluster for the job.
IntAndStatus clusterIdAndStatus = schedd.newCluster(transaction);
int clusterId = clusterIdAndStatus.getInteger();
// Get a new Job ID (aka a ProcId) for the Job.
IntAndStatus jobIdAndStatus = schedd.newJob(transaction, clusterId);
int jobId = jobIdAndStatus.getInteger();
// Build the Job's ClassAd.
ClassAdStructAttr[] jobAd = buildJobAd(owner, jobFileLocation, clusterId, jobId);
// Submit the Job's ClassAd.
schedd.submit(transaction, clusterId, jobId, jobAd);
// Debug: Dump JobAd
System.out.println( "CondorUtils::Dump JobAd: " + jobAd.toString() );
for( int i = 0; i < jobAd.length; i++ ) {
System.out.println( " JobAd[ " + i + " ] name = " + jobAd[ i ].getName() + " JobAd[ " + i + " ] value = " + jobAd[ i ].getValue() );
}
// Commit the transaction.
schedd.commitTransaction(transaction);
// Ask the Schedd to kick off the Job immediately.
schedd.requestReschedule();
}
*/
} // class CondorUtils
| Debug2 parse job status
| src/us/kbase/common/utils/CondorUtils.java | Debug2 parse job status | <ide><path>rc/us/kbase/common/utils/CondorUtils.java
<ide>
<ide> // TODO: parse the substring after '=' from line
<ide> // Gets NPE for job id of bogusJobId
<del> status = Integer.valueOf( line.substring( (line.indexOf("=") + 1), line.length() ) );
<add> status = Integer.valueOf( line.substring( (line.indexOf("=") + 2), line.length() ) );
<ide>
<ide>
<ide> |
|
Java | apache-2.0 | 363f747a013b91c3d35b6b85c536033609bcffd4 | 0 | unic/neba,unic/neba,unic/neba | /*
Copyright 2013 the original author or authors.
Licensed under the Apache License, Version 2.0 the "License";
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.neba.core.resourcemodels.views.json;
import io.neba.api.services.ResourceModelResolver;
import io.neba.core.resourcemodels.mapping.NestedMappingSupport;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.Designate;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
import org.osgi.service.metatype.annotations.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.servlet.Servlet;
import java.io.IOException;
import java.util.Enumeration;
import java.util.regex.Pattern;
import static io.neba.core.util.BundleUtil.displayNameOf;
import static java.util.regex.Pattern.compile;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED;
import static javax.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE;
@Component(
property = {
"sling.servlet.extensions=json"
},
service = Servlet.class
)
@Designate(ocd = JsonViewServlets.Configuration.class, factory = true)
public class JsonViewServlets extends SlingAllMethodsServlet {
private static final Logger LOGGER = LoggerFactory.getLogger(JsonViewServlets.class);
    private static final Pattern EXPECTED_MODEL_NAME = compile("[A-Za-z0-9_\\-#]+");
private static final long serialVersionUID = -7762218328479266916L;
@Reference
private ResourceModelResolver modelResolver;
@Reference
private NestedMappingSupport nestedMappingSupport;
private Jackson2ModelSerializer serializer;
private Configuration configuration;
@Activate
protected void activate(@Nonnull ComponentContext context, @Nonnull Configuration configuration) {
this.configuration = configuration;
// Jackson is an optional dependency.
try {
Class<?> generatorClass = getClass().getClassLoader().loadClass("com.fasterxml.jackson.core.JsonGenerator");
LOGGER.info("Found JSON generator from {}. JSON views are enabled.", generatorClass.getClassLoader());
this.serializer = new Jackson2ModelSerializer(nestedMappingSupport::getRecordedMappings, configuration.jacksonSettings(), configuration.addTypeAttribute());
} catch (ClassNotFoundException e) {
LOGGER.info("JSON views will not be available since Jackson2 cannot be found from bundle {}. Jackson is an optional dependency. " +
"To use the NEBA model to JSON mapping, install at least the jackson-core " +
"and jackson-databind bundles.", displayNameOf(context.getUsingBundle()));
}
}
/**
* The expected pattern is
* /some/resource/path.[general json view selector].[optional model name selector].json
*/
@Override
protected void doGet(@Nonnull SlingHttpServletRequest request, @Nonnull SlingHttpServletResponse response) throws IOException {
if (this.serializer == null) {
response.sendError(SC_SERVICE_UNAVAILABLE, "The JSON view service is not available.");
LOGGER.warn("A client tried to call the JSON view servlet, but the service is unavailable because either jackson-core or jackson-databind are missing from this bundle's classpath. Responding with HTTP 503 Service unavailable.");
return;
}
String etag = null;
if (configuration.generateEtag()) {
etag = "W/\"" + request.getResource().getResourceMetadata().getModificationTime() + "-" + request.getResource().getPath() + '"';
Enumeration<String> clientEtags = request.getHeaders("If-None-Match");
if (clientEtags.hasMoreElements()) {
while (clientEtags.hasMoreElements()) {
if (etag.equals(clientEtags.nextElement())) {
response.setStatus(SC_NOT_MODIFIED);
return;
}
}
}
}
nestedMappingSupport.beginRecordingMappings();
try {
String[] selectors = request.getRequestPathInfo().getSelectors();
Object model;
if (selectors.length == 1) {
model = modelResolver.resolveMostSpecificModel(request.getResource());
if (model == null) {
response.sendError(SC_NOT_FOUND, "No model could be resolved for resource " + request.getResource().getPath());
return;
}
} else if (selectors.length == 2) {
String modelName = selectors[1];
if (!EXPECTED_MODEL_NAME.matcher(modelName).matches()) {
// XSS security check: Since we echo the model name if no model was found and the model name is user input, we must make sure
// only to echo or record input matching a specific format.
response.sendError(SC_BAD_REQUEST, "Invalid model name. The model name must match the pattern " + EXPECTED_MODEL_NAME.pattern());
return;
}
model = modelResolver.resolveMostSpecificModelWithName(request.getResource(), modelName);
if (model == null) {
response.sendError(SC_NOT_FOUND, "No model with name " + modelName + " could be resolved for resource " + request.getResource().getPath());
return;
}
} else {
response.sendError(SC_BAD_REQUEST, "Invalid selectors. The expected format is <json servlet selector>[.<optional model name>]");
return;
}
response.setContentType("application/json");
response.setHeader("Cache-Control", configuration.cacheControlHeader());
if (etag != null) {
response.setHeader("Etag", etag);
}
response.setCharacterEncoding(this.configuration.encoding());
serializer.serialize(response.getWriter(), model);
} finally {
nestedMappingSupport.endRecordingMappings();
}
}
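    // Request-shape sketch (illustrative paths; "model" is the default selector configured below):
    //   GET /content/example/page.model.json         -> most specific NEBA model for the resource
    //   GET /content/example/page.model.MyModel.json -> model registered under the name "MyModel"
    // Any other selector count yields 400 Bad Request; an unresolvable model yields 404 Not Found.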
@Override
public void init() {
LOGGER.info("Servlet instance started");
}
@Override
public void destroy() {
LOGGER.info("Servlet instance stopped");
}
@ObjectClassDefinition(
name = "NEBA model JSON view servlet",
description =
"Renders resources as JSON using a NEBA model." +
"The used model is either the most specific NEBA model for the requested resource's type or a model " +
"with the name specified in the selectors, provided that model is for a compatible resource type. " +
"The JSON view can thus be resolved using </resource/path>.<one of the configured selectors>[.<optional specific model name>].json")
public @interface Configuration {
@AttributeDefinition(
name = "Encoding",
description = "The encoding to use when serializing models to JSON. The JSON specification explicitly defines the encodings that can be used, thus this must be one of http://www.ietf.org/rfc/rfc4627.txt.",
options = {
@Option(label = "UTF-8", value = "UTF-8"),
@Option(label = "UTF-16BE", value = "UTF-16BE"),
@Option(label = "UTF-16LE", value = "UTF-16LE"),
@Option(label = "UTF-32BE", value = "UTF-32BE"),
@Option(label = "UTF-32LE", value = "UTF-32LE")
}
)
String encoding() default "UTF-8";
@AttributeDefinition(
name = "Servlet selectors",
description = "The selectors this servlet is listening for. Note that 'model' is the default used by the Apache Sling Exporter Framework.")
@SuppressWarnings("unused")
String[] sling_servlet_selectors() default "model";
@AttributeDefinition(
name = "Resource types",
description =
"If specified, this servlet will only serve JSON views for resource with one of the given types. " +
"By default, this servlet will serve requests for all resources by registering itself " +
"as the default servlet for the configured selector(s). Defaults to sling/servlet/default. " +
"Note that primary note types, such as nt:unstructured, are not supported by Sling Servlets.")
@SuppressWarnings("unused")
String[] sling_servlet_resourceTypes() default "sling/servlet/default";
@AttributeDefinition(
name = "Ranking",
description =
"Defines the service ranking of the servlet. The higher the number, the higher the ranking. " +
"This can be used to override the default sling models jackson export when using the same selectors and resource types," +
"as servlets with a higher ranking can override servlets with a lower ranking.")
@SuppressWarnings("unused")
int service_ranking() default 0;
@AttributeDefinition(
name = "Jackson features",
description = "Enable or disable serialization or module features using the " +
"respective enumeration names and a boolean flag, " +
"for instance SerializationFeature.INDENT_OUTPUT=false or MapperFeature.SORT_PROPERTIES_ALPHABETICALLY=true.")
String[] jacksonSettings() default "SerializationFeature.WRITE_DATES_AS_TIMESTAMPS=true";
@AttributeDefinition(
name = "Add :type attribute",
description = "Automatically add the resource type for which the model was resolved in an attribute called ':type' to the generated JSON. " +
"This is useful e.g. to determine the frontend components responsible for rendering generated JSON.")
boolean addTypeAttribute() default false;
@AttributeDefinition(
name = "Generate Etag",
description = "Generate an Etag header based on the path and modification date of the request's resource. " +
"Enabling this must be done in combination with a cache-control header that allows caching (see below), " +
"e.g. 'private, max-age=86400, must-revalidate', which would allow in-browser caching for 24 hours.")
boolean generateEtag() default false;
@AttributeDefinition(
name = "Cache-Control header",
description = "Add the following Cache-Control HTTP header to all responses.")
String cacheControlHeader() default "private, no-cache, no-store, must-revalidate";
}
}
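// Configuration sketch (illustrative only): a factory configuration instance for this servlet could
// declare properties mirroring the Configuration interface above, e.g. (values shown are the defaults):
//
//   encoding                    = "UTF-8"
//   sling.servlet.selectors     = ["model"]
//   sling.servlet.resourceTypes = ["sling/servlet/default"]
//   service.ranking             = 0
//   jacksonSettings             = ["SerializationFeature.WRITE_DATES_AS_TIMESTAMPS=true"]
//   addTypeAttribute            = false
//   generateEtag                = false
//   cacheControlHeader          = "private, no-cache, no-store, must-revalidate"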
| core/src/main/java/io/neba/core/resourcemodels/views/json/JsonViewServlets.java | /*
Copyright 2013 the original author or authors.
Licensed under the Apache License, Version 2.0 the "License";
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.neba.core.resourcemodels.views.json;
import io.neba.api.services.ResourceModelResolver;
import io.neba.core.resourcemodels.mapping.NestedMappingSupport;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.metatype.annotations.AttributeDefinition;
import org.osgi.service.metatype.annotations.Designate;
import org.osgi.service.metatype.annotations.ObjectClassDefinition;
import org.osgi.service.metatype.annotations.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.servlet.Servlet;
import java.io.IOException;
import java.util.Enumeration;
import java.util.regex.Pattern;
import static io.neba.core.util.BundleUtil.displayNameOf;
import static java.util.regex.Pattern.compile;
import static javax.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static javax.servlet.http.HttpServletResponse.SC_NOT_MODIFIED;
import static javax.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE;
@Component(
property = {
"sling.servlet.extensions=json"
},
service = Servlet.class
)
@Designate(ocd = JsonViewServlets.Configuration.class, factory = true)
public class JsonViewServlets extends SlingAllMethodsServlet {
private static final Logger LOGGER = LoggerFactory.getLogger(JsonViewServlets.class);
    private static final Pattern EXPECTED_MODEL_NAME = compile("[A-Za-z0-9_\\-#]+");
private static final long serialVersionUID = -7762218328479266916L;
@Reference
private ResourceModelResolver modelResolver;
@Reference
private NestedMappingSupport nestedMappingSupport;
private Jackson2ModelSerializer serializer;
private Configuration configuration;
@Activate
protected void activate(@Nonnull ComponentContext context, @Nonnull Configuration configuration) {
this.configuration = configuration;
// Jackson is an optional dependency.
try {
Class<?> generatorClass = getClass().getClassLoader().loadClass("com.fasterxml.jackson.core.JsonGenerator");
LOGGER.info("Found JSON generator from {}. JSON views are enabled.", generatorClass.getClassLoader());
this.serializer = new Jackson2ModelSerializer(nestedMappingSupport::getRecordedMappings, configuration.jacksonSettings(), configuration.addTypeAttribute());
} catch (ClassNotFoundException e) {
LOGGER.info("JSON views will not be available since Jackson2 cannot be found from bundle {}. Jackson is an optional dependency. " +
"To use the NEBA model to JSON mapping, install at least the jackson-core " +
"and jackson-databind bundles.", displayNameOf(context.getUsingBundle()));
}
}
/**
* The expected pattern is
* /some/resource/path.[general json view selector].[optional model name selector].json
*/
@Override
protected void doGet(@Nonnull SlingHttpServletRequest request, @Nonnull SlingHttpServletResponse response) throws IOException {
if (this.serializer == null) {
response.sendError(SC_SERVICE_UNAVAILABLE, "The JSON view service is not available.");
LOGGER.warn("A client tried to call the JSON view servlet, but the service is unavailable because either jackson-core or jackson-databind are missing from this bundle's classpath. Responding with HTTP 503 Service unavailable.");
return;
}
String etag = null;
if (configuration.generateEtag()) {
etag = "W/\"" + request.getResource().getResourceMetadata().getModificationTime() + "-" + request.getResource().getPath() + '"';
Enumeration<String> clientEtags = request.getHeaders("If-None-Match");
if (clientEtags.hasMoreElements()) {
while (clientEtags.hasMoreElements()) {
if (etag.equals(clientEtags.nextElement())) {
response.setStatus(SC_NOT_MODIFIED);
return;
}
}
}
}
nestedMappingSupport.beginRecordingMappings();
try {
String[] selectors = request.getRequestPathInfo().getSelectors();
Object model;
if (selectors.length == 1) {
model = modelResolver.resolveMostSpecificModel(request.getResource());
if (model == null) {
response.sendError(SC_NOT_FOUND, "No model could be resolved for resource " + request.getResource().getPath());
return;
}
} else if (selectors.length == 2) {
String modelName = selectors[1];
if (!EXPECTED_MODEL_NAME.matcher(modelName).matches()) {
// XSS security check: Since we echo the model name if no model was found and the model name is user input, we must make sure
// only to echo or record input matching a specific format.
response.sendError(SC_BAD_REQUEST, "Invalid model name. The model name must match the pattern " + EXPECTED_MODEL_NAME.pattern());
return;
}
model = modelResolver.resolveMostSpecificModelWithName(request.getResource(), modelName);
if (model == null) {
response.sendError(SC_NOT_FOUND, "No model with name " + modelName + " could be resolved for resource " + request.getResource().getPath());
return;
}
} else {
response.sendError(SC_BAD_REQUEST, "Invalid selectors. The expected format is <json servlet selector>[.<optional model name>]");
return;
}
response.setContentType("application/json");
response.setHeader("Cache-Control", configuration.cacheControlHeader());
if (etag != null) {
response.setHeader("Etag", etag);
}
response.setCharacterEncoding(this.configuration.encoding());
serializer.serialize(response.getWriter(), model);
} finally {
nestedMappingSupport.endRecordingMappings();
}
}
@Override
public void init() {
LOGGER.info("Servlet instance started");
}
@Override
public void destroy() {
LOGGER.info("Servlet instance stopped");
}
@ObjectClassDefinition(
name = "NEBA model JSON view servlet",
description =
"Renders resources as JSON using a NEBA model." +
"The used model is either the most specific NEBA model for the requested resource's type or a model " +
"with the name specified in the selectors, provided that model is for a compatible resource type. " +
"The JSON view can thus be resolved using </resource/path>.<one of the configured selectors>[.<optional specific model name>].json")
public @interface Configuration {
@AttributeDefinition(
name = "Encoding",
description = "The encoding to use when serializing models to JSON. The JSON specification explicitly defines the encodings that can be used, thus this must be one of http://www.ietf.org/rfc/rfc4627.txt.",
options = {
@Option(label = "UTF-8", value = "UTF-8"),
@Option(label = "UTF-16BE", value = "UTF-16BE"),
@Option(label = "UTF-16LE", value = "UTF-16LE"),
@Option(label = "UTF-32BE", value = "UTF-32BE"),
@Option(label = "UTF-32LE", value = "UTF-32LE")
}
)
String encoding() default "UTF-8";
@AttributeDefinition(
name = "Servlet selectors",
description = "The selectors this servlet is listening for. Note that 'model' is the default used by the Apache Sling Exporter Framework.")
@SuppressWarnings("unused")
String[] sling_servlet_selectors() default "model";
@AttributeDefinition(
name = "Resource types",
                description =
                        "If specified, this servlet will only serve JSON views for resources with one of the given types. " +
                        "By default, this servlet will serve requests for all resources by registering itself " +
                        "as the default servlet for the configured selector(s). Defaults to sling/servlet/default. " +
                        "Note that primary node types, such as nt:unstructured, are not supported by Sling Servlets.")
@SuppressWarnings("unused")
String[] sling_servlet_resourceTypes() default "sling/servlet/default";
@AttributeDefinition(
name = "Jackson features",
description = "Enable or disable serialization or module features using the " +
"respective enumeration names and a boolean flag, " +
"for instance SerializationFeature.INDENT_OUTPUT=false or MapperFeature.SORT_PROPERTIES_ALPHABETICALLY=true.")
String[] jacksonSettings() default "SerializationFeature.WRITE_DATES_AS_TIMESTAMPS=true";
@AttributeDefinition(
name = "Add :type attribute",
description = "Automatically add the resource type for which the model was resolved in an attribute called ':type' to the generated JSON. " +
"This is useful e.g. to determine the frontend components responsible for rendering generated JSON.")
boolean addTypeAttribute() default false;
@AttributeDefinition(
name = "Generate Etag",
description = "Generate an Etag header based on the path and modification date of the request's resource. " +
"Enabling this must be done in combination with a cache-control header that allows caching (see below), " +
"e.g. 'private, max-age=86400, must-revalidate', which would allow in-browser caching for 24 hours.")
boolean generateEtag() default false;
@AttributeDefinition(
name = "Cache-Control header",
description = "Add the following Cache-Control HTTP header to all responses.")
String cacheControlHeader() default "private, no-cache, no-store, must-revalidate";
}
}
| #365 JSON views: Allow user-defined service ranking
| core/src/main/java/io/neba/core/resourcemodels/views/json/JsonViewServlets.java | #365 JSON views: Allow user-defined service ranking | <ide><path>ore/src/main/java/io/neba/core/resourcemodels/views/json/JsonViewServlets.java
<ide> String[] sling_servlet_resourceTypes() default "sling/servlet/default";
<ide>
<ide> @AttributeDefinition(
<add> name = "Ranking",
<add> description =
<add> "Defines the service ranking of the servlet. The higher the number, the higher the ranking. " +
<add>                     "This can be used to override the default sling models jackson export when using the same selectors and resource types, " +
<add> "as servlets with a higher ranking can override servlets with a lower ranking.")
<add> @SuppressWarnings("unused")
<add> int service_ranking() default 0;
<add>
<add> @AttributeDefinition(
<ide> name = "Jackson features",
<ide> description = "Enable or disable serialization or module features using the " +
<ide> "respective enumeration names and a boolean flag, " + |
|
Java | apache-2.0 | error: pathspec 'liferay-workspace/tests/blade.samples.test/src/testFunctional/java/com/liferay/blade/samples/portlet/filter/test/BladeSamplesPortletFilterTest.java' did not match any file(s) known to git
| 46b90a7d6bd52755511a3476399a988209a6c031 | 1 | gamerson/liferay-blade-samples,gamerson/liferay-blade-samples,gamerson/liferay-blade-samples,rafoli/liferay-blade-samples,rafoli/liferay-blade-samples,gamerson/liferay-blade-samples,gamerson/liferay-blade-samples,gamerson/liferay-blade-samples,rafoli/liferay-blade-samples,rafoli/liferay-blade-samples,rafoli/liferay-blade-samples | /**
* Copyright 2000-present Liferay, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.liferay.blade.samples.portlet.filter.test;
import com.liferay.arquillian.portal.annotation.PortalURL;
import com.liferay.portal.kernel.exception.PortalException;
import java.io.File;
import java.net.URL;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.drone.api.annotation.Drone;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
/**
* @author Lawrence Lee
*/
@RunAsClient
@RunWith(Arquillian.class)
public class BladeSamplesPortletFilterTest {
@Deployment
public static JavaArchive create() throws Exception {
final File jarFile = new File(System.getProperty("filterPortletJarFile"));
return ShrinkWrap.createFromZipFile(JavaArchive.class, jarFile);
}
@Test
public void testBladePortletFilter()
throws InterruptedException, PortalException {
_webDriver.get(_portletURL.toExternalForm());
Assert.assertTrue(
"Portlet was not deployed",
isVisible(_bladeSampleFilterPortlet));
Assert.assertTrue(
"Expected Example Filter Portlet, but saw " +
_portletTitle.getText(),
_portletTitle.getText().contentEquals("Example Filter Portlet"));
Assert.assertTrue(
"Expected Custom Attribute = My Custom Attribute Value, but saw " +
_portletBody.getText(),
_portletBody.getText().contentEquals("Custom Attribute = My Custom Attribute Value"));
}
protected boolean isVisible(WebElement webelement) {
WebDriverWait webDriverWait = new WebDriverWait(_webDriver, 5);
try {
webDriverWait.until(ExpectedConditions.visibilityOf(webelement));
return true;
}
catch (org.openqa.selenium.TimeoutException te) {
return false;
}
}
@FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]")
private WebElement _bladeSampleFilterPortlet;
@FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]//..//div[@class='portlet-body']")
private WebElement _portletBody;
@FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]//..//h2")
private WebElement _portletTitle;
@PortalURL("blade_portlet_filter_ExamplePortlet")
private URL _portletURL;
@Drone
private WebDriver _webDriver;
}
| liferay-workspace/tests/blade.samples.test/src/testFunctional/java/com/liferay/blade/samples/portlet/filter/test/BladeSamplesPortletFilterTest.java | Add Filter Portlet Functional Test
| liferay-workspace/tests/blade.samples.test/src/testFunctional/java/com/liferay/blade/samples/portlet/filter/test/BladeSamplesPortletFilterTest.java | Add Filter Portlet Functional Test | <ide><path>iferay-workspace/tests/blade.samples.test/src/testFunctional/java/com/liferay/blade/samples/portlet/filter/test/BladeSamplesPortletFilterTest.java
<add>/**
<add> * Copyright 2000-present Liferay, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package com.liferay.blade.samples.portlet.filter.test;
<add>
<add>
<add>import com.liferay.arquillian.portal.annotation.PortalURL;
<add>import com.liferay.portal.kernel.exception.PortalException;
<add>
<add>import java.io.File;
<add>
<add>import java.net.URL;
<add>
<add>import org.jboss.arquillian.container.test.api.Deployment;
<add>import org.jboss.arquillian.container.test.api.RunAsClient;
<add>import org.jboss.arquillian.drone.api.annotation.Drone;
<add>import org.jboss.arquillian.junit.Arquillian;
<add>import org.jboss.shrinkwrap.api.ShrinkWrap;
<add>import org.jboss.shrinkwrap.api.spec.JavaArchive;
<add>
<add>import org.junit.Assert;
<add>import org.junit.Test;
<add>import org.junit.runner.RunWith;
<add>
<add>import org.openqa.selenium.WebDriver;
<add>import org.openqa.selenium.WebElement;
<add>import org.openqa.selenium.support.FindBy;
<add>import org.openqa.selenium.support.ui.ExpectedConditions;
<add>import org.openqa.selenium.support.ui.WebDriverWait;
<add>
<add>/**
<add> * @author Lawrence Lee
<add> */
<add>@RunAsClient
<add>@RunWith(Arquillian.class)
<add>public class BladeSamplesPortletFilterTest {
<add> @Deployment
<add> public static JavaArchive create() throws Exception {
<add> final File jarFile = new File(System.getProperty("filterPortletJarFile"));
<add>
<add> return ShrinkWrap.createFromZipFile(JavaArchive.class, jarFile);
<add> }
<add>
<add> @Test
<add> public void testBladePortletFilter()
<add> throws InterruptedException, PortalException {
<add>
<add> _webDriver.get(_portletURL.toExternalForm());
<add>
<add> Assert.assertTrue(
<add> "Portlet was not deployed",
<add> isVisible(_bladeSampleFilterPortlet));
<add>
<add> Assert.assertTrue(
<add> "Expected Example Filter Portlet, but saw " +
<add> _portletTitle.getText(),
<add> _portletTitle.getText().contentEquals("Example Filter Portlet"));
<add>
<add> Assert.assertTrue(
<add> "Expected Custom Attribute = My Custom Attribute Value, but saw " +
<add> _portletBody.getText(),
<add> _portletBody.getText().contentEquals("Custom Attribute = My Custom Attribute Value"));
<add>
<add> }
<add>
<add> protected boolean isVisible(WebElement webelement) {
<add> WebDriverWait webDriverWait = new WebDriverWait(_webDriver, 5);
<add>
<add> try {
<add> webDriverWait.until(ExpectedConditions.visibilityOf(webelement));
<add>
<add> return true;
<add> }
<add> catch (org.openqa.selenium.TimeoutException te) {
<add> return false;
<add> }
<add> }
<add>
<add> @FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]")
<add> private WebElement _bladeSampleFilterPortlet;
<add>
<add> @FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]//..//div[@class='portlet-body']")
<add> private WebElement _portletBody;
<add>
<add> @FindBy(xpath = "//div[contains(@id,'blade_portlet_filter_ExamplePortlet')]//..//h2")
<add> private WebElement _portletTitle;
<add>
<add> @PortalURL("blade_portlet_filter_ExamplePortlet")
<add> private URL _portletURL;
<add>
<add> @Drone
<add> private WebDriver _webDriver;
<add>
<add>} |
|
Java | mit | b4b208899236b806553178d3b2796b15ba575082 | 0 | melonhead901/programming-competition,melonhead901/programming-competition | package aoc20;
import java.util.Scanner;
import java.util.Stack;
public class Day18 {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String line = in.nextLine();
long sum = 0;
while (!line.isBlank()) {
long result = processLine(line);
sum += result;
System.err.println(result);
line = in.nextLine();
}
System.out.println(sum);
}
private static long processLine(String line) {
Stack<Long> values = new Stack<>();
Stack<Character> ops = new Stack<>();
for (int i = 0; i < line.length(); i++) {
char c = line.charAt(i);
if (c == ' ') {
continue;
}
if (Character.isDigit(c)) {
values.push(Long.parseLong(c + ""));
} else if ((c == '+') || (c == '*')) {
while (!(ops.isEmpty() || ((ops.peek() == '*') && (c == '+')) || (ops.peek() == '('))) {
doProcess(values, ops.pop());
}
ops.push(c);
} else if (c == '(') {
ops.push(c);
} else if (c == ')') {
while (ops.peek() != '(') {
doProcess(values, ops.pop());
}
ops.pop();
} else {
throw new IllegalStateException();
}
}
while (!ops.isEmpty()) {
doProcess(values, ops.pop());
}
return values.pop();
}
private static void doProcess(Stack<Long> values, Character pop) {
long a = values.pop();
long b = values.pop();
switch (pop) {
case '+':
values.push(a + b);
return;
case '*':
values.push(a * b);
return;
default:
throw new IllegalStateException(pop +"");
}
}
}
| AdventOfCode/src/aoc20/Day18.java | package aoc20;
import java.util.Scanner;
import java.util.Stack;
public class Day18 {
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
String line = in.nextLine();
long sum = 0;
while (!line.isBlank()) {
long result = processLine(line);
sum += result;
System.err.println(result);
line = in.nextLine();
}
System.out.println(sum);
}
private static long processLine(String line) {
Stack<Long> values = new Stack<>();
Stack<Character> ops = new Stack<>();
for (int i = 0; i < line.length(); i++) {
char c = line.charAt(i);
if (c == ' ') {
continue;
}
if (Character.isDigit(c)) {
values.push(Long.parseLong(c + ""));
} else if ((c == '+') || (c == '*')) {
while (!ops.isEmpty() && (ops.peek() != '(')) {
doProcess(values, ops.pop());
}
ops.push(c);
} else if (c == '(') {
ops.push(c);
} else if (c == ')') {
while (ops.peek() != '(') {
doProcess(values, ops.pop());
}
ops.pop();
} else {
throw new IllegalStateException();
}
}
while (!ops.isEmpty()) {
doProcess(values, ops.pop());
}
return values.pop();
}
private static void doProcess(Stack<Long> values, Character pop) {
long a = values.pop();
long b = values.pop();
switch (pop) {
case '+':
values.push(a + b);
return;
case '*':
values.push(a * b);
return;
default:
throw new IllegalStateException(pop +"");
}
}
}
| day 18 2
| AdventOfCode/src/aoc20/Day18.java | day 18 2 | <ide><path>dventOfCode/src/aoc20/Day18.java
<ide> if (Character.isDigit(c)) {
<ide> values.push(Long.parseLong(c + ""));
<ide> } else if ((c == '+') || (c == '*')) {
<del> while (!ops.isEmpty() && (ops.peek() != '(')) {
<add> while (!(ops.isEmpty() || ((ops.peek() == '*') && (c == '+')) || (ops.peek() == '('))) {
<ide> doProcess(values, ops.pop());
<ide> }
<ide> ops.push(c); |
|
Java | apache-2.0 | 31588b2d61c74d42a71a0bfba384225856ccfcae | 0 | ajordens/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,ajordens/clouddriver,ajordens/clouddriver,spinnaker/clouddriver,spinnaker/clouddriver | /*
* Copyright 2019 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.jobs.local;
import com.netflix.spinnaker.clouddriver.jobs.JobExecutionException;
import com.netflix.spinnaker.clouddriver.jobs.JobExecutor;
import com.netflix.spinnaker.clouddriver.jobs.JobRequest;
import com.netflix.spinnaker.clouddriver.jobs.JobResult;
import java.io.*;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.exec.*;
@Slf4j
public class JobExecutorLocal implements JobExecutor {
// We don't actually use this executor to run the jobs as we're deferring to the Apache Commons
// library to do this. Ideally we'd refactor this class to use ProcessBuilder, but given that
// the main consumer is the Kubernetes provider and we have plans to refactor it to use a client
// library, it is not worth the effort at this point.
  // This executor is only used to parse the output of a job when running in streaming mode; the
// main thread waits on the job while the output parsing is sent to the executor.
private final ExecutorService executorService = Executors.newCachedThreadPool();
private final long timeoutMinutes;
public JobExecutorLocal(long timeoutMinutes) {
this.timeoutMinutes = timeoutMinutes;
}
@Override
public JobResult<String> runJob(final JobRequest jobRequest) {
return executeWrapper(jobRequest, this::execute);
}
@Override
public <T> JobResult<T> runJob(final JobRequest jobRequest, ReaderConsumer<T> readerConsumer) {
return executeWrapper(jobRequest, request -> executeStreaming(request, readerConsumer));
}
private <T> JobResult<T> executeWrapper(
final JobRequest jobRequest, RequestExecutor<T> requestExecutor) {
log.debug(String.format("Starting job: '%s'...", jobRequest.toString()));
final String jobId = UUID.randomUUID().toString();
JobResult<T> jobResult;
try {
jobResult = requestExecutor.execute(jobRequest);
} catch (IOException e) {
throw new JobExecutionException(
String.format("Error executing job: %s", jobRequest.toString()), e);
}
if (jobResult.isKilled()) {
log.warn(String.format("Job %s timed out (after %d minutes)", jobId, timeoutMinutes));
}
return jobResult;
}
private JobResult<String> execute(JobRequest jobRequest) throws IOException {
ByteArrayOutputStream stdOut = new ByteArrayOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
Executor executor =
buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
int exitValue = executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment());
return JobResult.<String>builder()
.result(exitValue == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
.killed(executor.getWatchdog().killedProcess())
.output(stdOut.toString())
.error(stdErr.toString())
.build();
}
private <T> JobResult<T> executeStreaming(JobRequest jobRequest, ReaderConsumer<T> consumer)
throws IOException {
PipedOutputStream stdOut = new PipedOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
Executor executor =
buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
// Send a task to the executor to consume the output from the job.
Future<T> futureResult =
executorService.submit(
() ->
consumer.consume(
new BufferedReader(new InputStreamReader(new PipedInputStream(stdOut)))));
int exitValue = executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment());
T result;
try {
result = futureResult.get();
} catch (InterruptedException e) {
executor.getWatchdog().destroyProcess();
Thread.currentThread().interrupt();
throw new JobExecutionException(
String.format("Interrupted while executing job: %s", jobRequest.toString()), e);
} catch (ExecutionException e) {
throw new JobExecutionException(
String.format("Error parsing output of job: %s", jobRequest.toString()), e.getCause());
}
return JobResult.<T>builder()
.result(exitValue == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
.killed(executor.getWatchdog().killedProcess())
.output(result)
.error(stdErr.toString())
.build();
}
private Executor buildExecutor(ExecuteStreamHandler streamHandler) {
Executor executor = new DefaultExecutor();
executor.setStreamHandler(streamHandler);
executor.setWatchdog(new ExecuteWatchdog(timeoutMinutes * 60 * 1000));
// Setting this to null causes the executor to skip verifying exit codes; we'll handle checking
// the exit status instead of having the executor throw an exception for non-zero exit codes.
executor.setExitValues(null);
return executor;
}
interface RequestExecutor<U> {
JobResult<U> execute(JobRequest jobRequest) throws IOException;
}
}
| clouddriver-core/src/main/groovy/com/netflix/spinnaker/clouddriver/jobs/local/JobExecutorLocal.java | /*
* Copyright 2019 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.clouddriver.jobs.local;
import com.netflix.spinnaker.clouddriver.jobs.JobExecutionException;
import com.netflix.spinnaker.clouddriver.jobs.JobExecutor;
import com.netflix.spinnaker.clouddriver.jobs.JobRequest;
import com.netflix.spinnaker.clouddriver.jobs.JobResult;
import java.io.*;
import java.util.UUID;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.exec.*;
@Slf4j
public class JobExecutorLocal implements JobExecutor {
private final long timeoutMinutes;
public JobExecutorLocal(long timeoutMinutes) {
this.timeoutMinutes = timeoutMinutes;
}
@Override
public JobResult<String> runJob(final JobRequest jobRequest) {
return executeWrapper(jobRequest, this::execute);
}
@Override
public <T> JobResult<T> runJob(final JobRequest jobRequest, ReaderConsumer<T> readerConsumer) {
return executeWrapper(jobRequest, request -> executeStreaming(request, readerConsumer));
}
private <T> JobResult<T> executeWrapper(
final JobRequest jobRequest, RequestExecutor<T> requestExecutor) {
log.debug(String.format("Starting job: '%s'...", jobRequest.toString()));
final String jobId = UUID.randomUUID().toString();
JobResult<T> jobResult;
try {
jobResult = requestExecutor.execute(jobRequest);
} catch (IOException e) {
throw new JobExecutionException(
String.format("Error executing job: %s", jobRequest.toString()), e);
}
if (jobResult.isKilled()) {
log.warn(String.format("Job %s timed out (after %d minutes)", jobId, timeoutMinutes));
}
return jobResult;
}
private JobResult<String> execute(JobRequest jobRequest) throws IOException {
ByteArrayOutputStream stdOut = new ByteArrayOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
Executor executor =
buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
int exitValue = executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment());
return JobResult.<String>builder()
.result(exitValue == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
.killed(executor.getWatchdog().killedProcess())
.output(stdOut.toString())
.error(stdErr.toString())
.build();
}
private <T> JobResult<T> executeStreaming(JobRequest jobRequest, ReaderConsumer<T> consumer)
throws IOException {
PipedOutputStream stdOut = new PipedOutputStream();
ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
Executor executor =
buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment(), resultHandler);
T result;
try {
result =
consumer.consume(new BufferedReader(new InputStreamReader(new PipedInputStream(stdOut))));
} catch (IOException e) {
throw new JobExecutionException(
String.format("Error parsing output of job: %s", jobRequest.toString()), e);
}
try {
resultHandler.waitFor();
} catch (InterruptedException e) {
executor.getWatchdog().destroyProcess();
Thread.currentThread().interrupt();
throw new JobExecutionException(
String.format("Interrupted while executing job: %s", jobRequest.toString()), e);
}
return JobResult.<T>builder()
.result(
resultHandler.getExitValue() == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
.killed(executor.getWatchdog().killedProcess())
.output(result)
.error(stdErr.toString())
.build();
}
private Executor buildExecutor(ExecuteStreamHandler streamHandler) {
Executor executor = new DefaultExecutor();
executor.setStreamHandler(streamHandler);
executor.setWatchdog(new ExecuteWatchdog(timeoutMinutes * 60 * 1000));
// Setting this to null causes the executor to skip verifying exit codes; we'll handle checking
// the exit status instead of having the executor throw an exception for non-zero exit codes.
executor.setExitValues(null);
return executor;
}
interface RequestExecutor<U> {
JobResult<U> execute(JobRequest jobRequest) throws IOException;
}
}
| perf(kubernetes): Reduce latency of streaming job executor (#4804)
* perf(kubernetes): Reduce latency of streaming job executor
Now that we always live lookup events when loading a manifest, the
latency of that call becomes more important. (It was always important
for users with liveManifestCalls, but now becomes important for
everyone.)
In general, for kubectl calls that might return multiple items, we
parse the output as it is produced to avoid needing to buffer a lot
of intermediate data. We do this by requesting asynchronous execution
of the job, then using the main thread to parse any output from the
job as it is produced.
It turns out that the apache-commons library we use adds a fair amount
of latency for quick asynchronous jobs. This is because the .waitFor()
function on the resultHandler just does a poll/sleep loop to wait for
the forked process to finish, which causes us to often wait quite a
bit longer than necessary. This matters much less for long jobs (or
for background processes) but for responding to user requests and in
cases where the call is fast (which is usually the case for getting
events where there are often none) this is significant.
Instead of requesting the job to run asynchronously and using the
main thread to parse the output, let's do the opposite. We'll send
an asynchronous request to parse the output, then run the job
synchronously on the main thread. This avoids the issues with latency
because none of these threads poll.
Ideally we'd move away from this commons-exec library and just use
ProcessBuilder, but given that we'll soon move clouddriver-kubernetes
to use the client library, it's not worth the effort at this point.
In my testing, this reduced the average time for a call to get
manifests from 268ms to 210ms (a 22% reduction); the reduction of
~50ms roughly matches the polling frequency from before, which makes
sense as now we don't need to wait a polling cycle to return.
* style(core): Rename executor to executorService
This avoids shadowing it in the function where it is used. | clouddriver-core/src/main/groovy/com/netflix/spinnaker/clouddriver/jobs/local/JobExecutorLocal.java | perf(kubernetes): Reduce latency of streaming job executor (#4804) | <ide><path>louddriver-core/src/main/groovy/com/netflix/spinnaker/clouddriver/jobs/local/JobExecutorLocal.java
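To make the change concrete, here is a minimal, self-contained sketch of the same ordering idea written against plain ProcessBuilder instead of commons-exec: the output parsing is handed off to a worker pool while the calling thread blocks on the process itself, so nothing sits in a poll/sleep loop. The class name, the worker pool, and the kubectl command line are illustrative assumptions for this sketch only, not code from the repository.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;

public class StreamingExecSketch {
  // Worker pool used only to drain and parse the child's output.
  private static final ExecutorService PARSERS = Executors.newCachedThreadPool();

  public static void main(String[] args) throws Exception {
    // Start the child process; the command line here is purely illustrative.
    Process process = new ProcessBuilder("kubectl", "get", "events", "-o", "json")
        .redirectErrorStream(true) // merge stderr so a single reader drains everything
        .start();

    // Parse stdout on a worker thread as it is produced.
    Future<List<String>> parsed = PARSERS.submit(() -> {
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
        return reader.lines().collect(Collectors.toList());
      }
    });

    // The calling thread waits on the process directly, so it resumes the moment
    // the child exits rather than on the next cycle of a polling waitFor() loop.
    int exitValue = process.waitFor();
    List<String> output = parsed.get();

    System.out.println("exit=" + exitValue + ", lines parsed=" + output.size());
    PARSERS.shutdown();
  }
}

The actual change below keeps commons-exec for running the process and only moves the output reader onto an executor, which gets the same latency benefit without a larger refactor.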
<ide> import com.netflix.spinnaker.clouddriver.jobs.JobResult;
<ide> import java.io.*;
<ide> import java.util.UUID;
<add>import java.util.concurrent.ExecutionException;
<add>import java.util.concurrent.ExecutorService;
<add>import java.util.concurrent.Executors;
<add>import java.util.concurrent.Future;
<ide> import lombok.extern.slf4j.Slf4j;
<ide> import org.apache.commons.exec.*;
<ide>
<ide> @Slf4j
<ide> public class JobExecutorLocal implements JobExecutor {
<add> // We don't actually use this executor to run the jobs as we're deferring to the Apache Commons
<add> // library to do this. Ideally we'd refactor this class to use ProcessBuilder, but given that
<add> // the main consumer is the Kubernetes provider and we have plans to refactor it to use a client
<add> // library, it is not worth the effort at this point.
<add>  // This executor is only used to parse the output of a job when running in streaming mode; the
<add> // main thread waits on the job while the output parsing is sent to the executor.
<add> private final ExecutorService executorService = Executors.newCachedThreadPool();
<ide> private final long timeoutMinutes;
<ide>
<ide> public JobExecutorLocal(long timeoutMinutes) {
<ide> throws IOException {
<ide> PipedOutputStream stdOut = new PipedOutputStream();
<ide> ByteArrayOutputStream stdErr = new ByteArrayOutputStream();
<del>
<ide> Executor executor =
<ide> buildExecutor(new PumpStreamHandler(stdOut, stdErr, jobRequest.getInputStream()));
<del> DefaultExecuteResultHandler resultHandler = new DefaultExecuteResultHandler();
<del> executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment(), resultHandler);
<add>
<add> // Send a task to the executor to consume the output from the job.
<add> Future<T> futureResult =
<add> executorService.submit(
<add> () ->
<add> consumer.consume(
<add> new BufferedReader(new InputStreamReader(new PipedInputStream(stdOut)))));
<add> int exitValue = executor.execute(jobRequest.getCommandLine(), jobRequest.getEnvironment());
<ide>
<ide> T result;
<ide> try {
<del> result =
<del> consumer.consume(new BufferedReader(new InputStreamReader(new PipedInputStream(stdOut))));
<del> } catch (IOException e) {
<del> throw new JobExecutionException(
<del> String.format("Error parsing output of job: %s", jobRequest.toString()), e);
<del> }
<del>
<del> try {
<del> resultHandler.waitFor();
<add> result = futureResult.get();
<ide> } catch (InterruptedException e) {
<ide> executor.getWatchdog().destroyProcess();
<ide> Thread.currentThread().interrupt();
<ide> throw new JobExecutionException(
<ide> String.format("Interrupted while executing job: %s", jobRequest.toString()), e);
<add> } catch (ExecutionException e) {
<add> throw new JobExecutionException(
<add> String.format("Error parsing output of job: %s", jobRequest.toString()), e.getCause());
<ide> }
<ide>
<ide> return JobResult.<T>builder()
<del> .result(
<del> resultHandler.getExitValue() == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
<add> .result(exitValue == 0 ? JobResult.Result.SUCCESS : JobResult.Result.FAILURE)
<ide> .killed(executor.getWatchdog().killedProcess())
<ide> .output(result)
<ide> .error(stdErr.toString()) |
|
Java | agpl-3.0 | 81fce2672075143e0d2f8eb3065a9f808665ff83 | 0 | JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio | /*
* VisualMode.java
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.source.editors.text.visualmode;
import java.util.ArrayList;
import java.util.List;
import org.rstudio.core.client.BrowseCap;
import org.rstudio.core.client.CommandWithArg;
import org.rstudio.core.client.DebouncedCommand;
import org.rstudio.core.client.Debug;
import org.rstudio.core.client.PreemptiveTaskQueue;
import org.rstudio.core.client.Rendezvous;
import org.rstudio.core.client.SerializedCommand;
import org.rstudio.core.client.SerializedCommandQueue;
import org.rstudio.core.client.command.AppCommand;
import org.rstudio.core.client.dom.DomUtils;
import org.rstudio.core.client.patch.TextChange;
import org.rstudio.core.client.widget.HasFindReplace;
import org.rstudio.core.client.widget.ProgressPanel;
import org.rstudio.core.client.widget.ToolbarButton;
import org.rstudio.core.client.widget.images.ProgressImages;
import org.rstudio.studio.client.RStudioGinjector;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.Value;
import org.rstudio.studio.client.palette.model.CommandPaletteEntrySource;
import org.rstudio.studio.client.palette.model.CommandPaletteItem;
import org.rstudio.studio.client.panmirror.PanmirrorChanges;
import org.rstudio.studio.client.panmirror.PanmirrorCode;
import org.rstudio.studio.client.panmirror.PanmirrorContext;
import org.rstudio.studio.client.panmirror.PanmirrorKeybindings;
import org.rstudio.studio.client.panmirror.PanmirrorOptions;
import org.rstudio.studio.client.panmirror.PanmirrorSetMarkdownResult;
import org.rstudio.studio.client.panmirror.PanmirrorWidget;
import org.rstudio.studio.client.panmirror.command.PanmirrorCommands;
import org.rstudio.studio.client.panmirror.events.PanmirrorFocusEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorNavigationEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorStateChangeEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorUpdatedEvent;
import org.rstudio.studio.client.panmirror.location.PanmirrorEditingOutlineLocation;
import org.rstudio.studio.client.panmirror.location.PanmirrorEditingOutlineLocationItem;
import org.rstudio.studio.client.panmirror.outline.PanmirrorOutlineItemType;
import org.rstudio.studio.client.panmirror.pandoc.PanmirrorPandocFormat;
import org.rstudio.studio.client.panmirror.ui.PanmirrorUIDisplay;
import org.rstudio.studio.client.panmirror.uitools.PanmirrorUITools;
import org.rstudio.studio.client.panmirror.uitools.PanmirrorUIToolsSource;
import org.rstudio.studio.client.server.VoidServerRequestCallback;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.prefs.model.UserPrefs;
import org.rstudio.studio.client.workbench.views.source.Source;
import org.rstudio.studio.client.workbench.views.source.editors.text.AceEditor;
import org.rstudio.studio.client.workbench.views.source.editors.text.DocDisplay;
import org.rstudio.studio.client.workbench.views.source.editors.text.Scope;
import org.rstudio.studio.client.workbench.views.source.editors.text.ScopeList;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTarget;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTargetRMarkdownHelper;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditorContainer;
import org.rstudio.studio.client.workbench.views.source.editors.text.findreplace.FindReplaceBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.rmd.ChunkDefinition;
import org.rstudio.studio.client.workbench.views.source.editors.text.visualmode.events.VisualModeSpellingAddToDictionaryEvent;
import org.rstudio.studio.client.workbench.views.source.events.SourceDocAddedEvent;
import org.rstudio.studio.client.workbench.views.source.model.DirtyState;
import org.rstudio.studio.client.workbench.views.source.model.DocUpdateSentinel;
import org.rstudio.studio.client.workbench.views.source.model.SourcePosition;
import org.rstudio.studio.client.workbench.views.source.model.SourceServerOperations;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Command;
import com.google.inject.Inject;
import elemental2.core.JsObject;
import jsinterop.base.Js;
public class VisualMode implements VisualModeEditorSync,
CommandPaletteEntrySource,
SourceDocAddedEvent.Handler,
VisualModeSpelling.Context,
VisualModeConfirm.Context,
VisualModeSpellingAddToDictionaryEvent.Handler
{
public VisualMode(TextEditingTarget target,
TextEditingTarget.Display view,
TextEditingTargetRMarkdownHelper rmarkdownHelper,
DocDisplay docDisplay,
DirtyState dirtyState,
DocUpdateSentinel docUpdateSentinel,
EventBus eventBus,
final ArrayList<HandlerRegistration> releaseOnDismiss)
{
RStudioGinjector.INSTANCE.injectMembers(this);
target_ = target;
view_ = view;
docDisplay_ = docDisplay;
dirtyState_ = dirtyState;
docUpdateSentinel_ = docUpdateSentinel;
progress_ = new ProgressPanel(ProgressImages.createSmall(), 200);
// create peer helpers
visualModeFormat_ = new VisualModePanmirrorFormat(docUpdateSentinel_, docDisplay_, target_, view_);
visualModeChunks_ = new VisualModeChunks(docUpdateSentinel_, docDisplay_, target_, this);
visualModeLocation_ = new VisualModeEditingLocation(docUpdateSentinel_, docDisplay_);
visualModeWriterOptions_ = new VisualModeMarkdownWriter(docUpdateSentinel_, visualModeFormat_);
visualModeNavigation_ = new VisualModeNavigation(navigationContext_);
visualModeConfirm_ = new VisualModeConfirm(docUpdateSentinel_, docDisplay, this);
visualModeSpelling_ = new VisualModeSpelling(docUpdateSentinel_, docDisplay, this);
visualModeContext_ = new VisualModePanmirrorContext(
docUpdateSentinel_,
target_,
visualModeChunks_,
visualModeFormat_,
visualModeSpelling_
);
// create widgets that the rest of startup (e.g. manageUI) may rely on
initWidgets();
// subscribe to source doc added
releaseOnDismiss.add(eventBus.addHandler(SourceDocAddedEvent.TYPE, this));
// subscribe to spelling invalidation event
releaseOnDismiss.add(eventBus.addHandler(VisualModeSpellingAddToDictionaryEvent.TYPE, this));
// manage UI (then track changes over time)
manageUI(isActivated(), false);
releaseOnDismiss.add(onDocPropChanged(TextEditingTarget.RMD_VISUAL_MODE, (value) -> {
manageUI(isActivated(), true);
}));
// sync to outline visible prop
releaseOnDismiss.add(onDocPropChanged(TextEditingTarget.DOC_OUTLINE_VISIBLE, (value) -> {
withPanmirror(() -> {
panmirror_.showOutline(getOutlineVisible(), getOutlineWidth(), true);
});
}));
}
/**
* Classification of synchronization types from the visual editor to the code
* editor.
*/
public enum SyncType
{
// A normal synchronization (usually performed on idle)
SyncTypeNormal,
// A synchronization performed prior to executing code
SyncTypeExecution,
// A synchronization performed in order to activate the code editor
SyncTypeActivate
}
@Inject
public void initialize(Commands commands,
UserPrefs prefs,
SourceServerOperations source)
{
commands_ = commands;
prefs_ = prefs;
source_ = source;
}
public void onDismiss()
{
}
private void initWidgets()
{
findReplaceButton_ = new ToolbarButton(
ToolbarButton.NoText,
"Find/Replace",
FindReplaceBar.getFindIcon(),
(event) -> {
HasFindReplace findReplace = getFindReplace();
findReplace.showFindReplace(!findReplace.isFindReplaceShowing());
}
);
}
public boolean isActivated()
{
return docUpdateSentinel_.getBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
}
public boolean isVisualEditorActive()
{
return view_.editorContainer().isWidgetActive(panmirror_);
}
public void activate(ScheduledCommand completed)
{
if (!isActivated())
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, true);
manageUI(true, true, completed);
}
else if (isLoading_)
{
onReadyHandlers_.add(completed);
}
else
{
completed.execute();
}
}
public void deactivate(ScheduledCommand completed)
{
if (isActivated())
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
manageUI(false, true, completed);
}
else
{
completed.execute();
}
}
@Override
public void syncToEditor(SyncType syncType)
{
syncToEditor(syncType, null);
}
@Override
public void syncToEditor(SyncType syncType, Command ready)
{
// This is an asynchronous task, that we want to behave in a mostly FIFO
// way when overlapping calls to syncToEditor are made.
// Each syncToEditor operation can be thought of as taking place in three
// phases:
//
// 1 - Synchronously gathering state from panmirror, and kicking off the
// async pandoc operation
// 2 - The pandoc operation itself--this happens completely off the UI
// thread (in a different process in fact)
// 3 - With the result from pandoc, do some synchronous processing, sync
// the source editor, and invoke the `ready` parameter
//
// Part 2 is a "pure" operation so it doesn't matter when it runs. What
// matters is that phase 1 gathers state at the moment it's called, and
// if there are multiple operations in progress simultaneously, that the
// order in which different phase 3's are invoked reflect the order the
// operations were started. For example, if syncToEditor was called once
// (A) and then again (B), any of these sequences are fine:
// A1->A2->A3->B1->B2->B3
// A1->B1->A2->B2->A3->B3
// or even
// A1->B1->B2->A2->A3->B3
// but NOT
// A1->A2->B1->B2->B3->A3
//
// because if A1 comes before B1, then A3 must come before B3.
// Our plan of execution is:
// 1. Start the async operation
// 2a. Wait for the async operation to finish
// 2b. Wait for all preceding async operations to finish
// 3. Run our phase 3 logic and ready.execute()
// 4. Signal to the next succeeding async operation (if any) that we're
// done
// We use syncToEditorQueue_ to enforce the FIFO ordering. Because we
// don't know whether the syncToEditorQueue_ or the pandoc operation will
// finish first, we use a Rendezvous object to make sure both conditions
// are satisfied before we proceed.
Rendezvous rv = new Rendezvous(2);
syncToEditorQueue_.addCommand(new SerializedCommand() {
@Override
public void onExecute(Command continuation)
{
// We pass false to arrive() because it's important to not invoke
// the continuation before our phase 3 work has completed; the whole
// point is to enforce ordering of phase 3.
rv.arrive(() -> {
continuation.execute();
}, false);
}
});
if (isVisualEditorActive() && (syncType == SyncType.SyncTypeActivate || isDirty_)) {
// set flags
isDirty_ = false;
withPanmirror(() -> {
VisualModeMarkdownWriter.Options writerOptions =
visualModeWriterOptions_.optionsFromConfig(panmirror_.getPandocFormatConfig(true));
panmirror_.getMarkdown(writerOptions.options, kSerializationProgressDelayMs,
new CommandWithArg<JsObject>() {
@Override
public void execute(JsObject obj)
{
PanmirrorCode markdown = Js.uncheckedCast(obj);
rv.arrive(() ->
{
if (markdown == null)
{
// note that ready.execute() is never called in the error case
return;
}
// we are about to mutate the document, so create a single
// shot handler that will adjust the known position of
// items in the outline (we do this opportunistically
// unless executing code)
if (markdown.location != null && syncType != SyncType.SyncTypeExecution)
{
final Value<HandlerRegistration> handler = new Value<HandlerRegistration>(null);
handler.setValue(docDisplay_.addScopeTreeReadyHandler((evt) ->
{
alignScopeOutline(markdown.location);
handler.getValue().removeHandler();
}));
}
// apply diffs unless the wrap column changed (too expensive)
if (!writerOptions.wrapChanged)
{
TextEditorContainer.Changes changes = toEditorChanges(markdown);
getSourceEditor().applyChanges(changes, syncType == SyncType.SyncTypeActivate);
}
else
{
getSourceEditor().setCode(markdown.code);
}
// if the format comment has changed then show the reload prompt
if (panmirrorFormatConfig_.requiresReload())
{
view_.showPanmirrorFormatChanged(() ->
{
// dismiss the warning bar
view_.hideWarningBar();
// this will trigger the refresh b/c the format changed
syncFromEditorIfActivated();
});
}
if (markdown.location != null && syncType == SyncType.SyncTypeExecution)
{
// if syncing for execution, force a rebuild of the scope tree
alignScopeOutline(markdown.location);
}
// invoke ready callback if supplied
if (ready != null)
{
ready.execute();
}
}, true);
}
});
});
} else {
// Even if ready is null, it's important to arrive() so the
// syncToEditorQueue knows it can continue
rv.arrive(() ->
{
if (ready != null) {
ready.execute();
}
}, true);
}
}
@Override
public void syncFromEditorIfActivated()
{
if (isActivated())
{
// new editor content about to be sent to prosemirror, validate that we can edit it
String invalid = validateActivation();
if (invalid != null)
{
deactivateForInvalidSource(invalid);
return;
}
// get reference to the editing container
TextEditorContainer editorContainer = view_.editorContainer();
// show progress
progress_.beginProgressOperation(400);
editorContainer.activateWidget(progress_);
syncFromEditor((success) -> {
// clear progress
progress_.endProgressOperation();
// re-activate panmirror widget
editorContainer.activateWidget(panmirror_, false);
}, false);
}
}
@Override
public void syncFromEditor(final CommandWithArg<Boolean> done, boolean focus)
{
// flag to prevent the document being set to dirty when loading
// from source mode
loadingFromSource_ = true;
// if there is a previous format comment and it's changed then
// we need to tear down the editor instance and create a new one
if (panmirrorFormatConfig_ != null && panmirrorFormatConfig_.requiresReload())
{
panmirrorFormatConfig_ = null;
view_.editorContainer().removeWidget(panmirror_);
panmirror_ = null;
}
withPanmirror(() -> {
final String editorCode = getEditorCode();
final VisualModeMarkdownWriter.Options writerOptions = visualModeWriterOptions_.optionsFromCode(editorCode);
// serialize these calls (they are expensive on both the server side for the call(s)
         // to pandoc, and on the client side for initialization of the editor (esp. ace editors))
setMarkdownQueue_.addTask(new PreemptiveTaskQueue.Task()
{
@Override
public String getLabel()
{
return target_.getTitle();
}
@Override
public boolean shouldPreempt()
{
return target_.isActiveDocument();
}
@Override
public void execute(final Command taskDone)
{
// join done commands
final CommandWithArg<Boolean> allDone = (result) -> {
taskDone.execute();
if (done != null)
done.execute(result);
};
panmirror_.setMarkdown(editorCode, writerOptions.options, true, kCreationProgressDelayMs,
new CommandWithArg<JsObject>() {
@Override
public void execute(JsObject obj)
{
// get result
PanmirrorSetMarkdownResult result = Js.uncheckedCast(obj);
// update flags
isDirty_ = false;
loadingFromSource_ = false;
// bail on error
if (result == null)
{
allDone.execute(false);
return;
}
// show warning and terminate if there was unparsed metadata. note that the other
// option here would be to have setMarkdown send the unparsed metadata back to the
                     // server to generate yaml, and then include the metadata as yaml at the end of the
// document. this could be done using the method outlined here:
// https://github.com/jgm/pandoc/issues/2019
// specifically using this template:
/*
$if(titleblock)$
$titleblock$
$else$
--- {}
$endif$
*/
/// ...with this command line:
/*
pandoc -t markdown --template=yaml.template foo.md
*/
if (JsObject.keys(result.unparsed_meta).length > 0)
{
view_.showWarningBar("Unable to activate visual mode (unsupported front matter format or non top-level YAML block)");
allDone.execute(false);
return;
}
// clear progress (for possible dialog overlays created by confirmation)
progress_.endProgressOperation();
// confirm if necessary
visualModeConfirm_.withSwitchConfirmation(
// allow inspection of result
result,
// onConfirmed
() -> {
// if pandoc's view of the document doesn't match the editor's we
// need to reset the editor's code (for both dirty state and
// so that diffs are efficient)
if (result.canonical != editorCode)
{
getSourceEditor().setCode(result.canonical);
markDirty();
}
// completed
allDone.execute(true);
// deferred actions
Scheduler.get().scheduleDeferred(() -> {
// if we are being focused it means we are switching from source mode, in that
// case sync our editing location to what it is in source
if (focus)
{
panmirror_.focus();
panmirror_.setEditingLocation(
visualModeLocation_.getSourceOutlineLocation(),
visualModeLocation_.savedEditingLocation()
);
}
// show any warnings
PanmirrorPandocFormat format = panmirror_.getPandocFormat();
if (result.unrecognized.length > 0)
{
view_.showWarningBar("Unrecognized Pandoc token(s); " + String.join(", ", result.unrecognized));
}
else if (format.warnings.invalidFormat.length() > 0)
{
view_.showWarningBar("Invalid Pandoc format: " + format.warnings.invalidFormat);
}
else if (format.warnings.invalidOptions.length > 0)
{
                                    view_.showWarningBar("Unsupported extensions for markdown mode: " + String.join(", ", format.warnings.invalidOptions));
}
});
},
// onCancelled
() -> {
allDone.execute(false);
}
);
}
});
}
});
});
}
public boolean canWriteCanonical()
{
return validateActivation() == null;
}
public void getCanonicalChanges(String code, CommandWithArg<PanmirrorChanges> completed)
{
withPanmirror(() -> {
VisualModeMarkdownWriter.Options writerOptions = visualModeWriterOptions_.optionsFromCode(code);
panmirror_.getCanonical(code, writerOptions.options, kSerializationProgressDelayMs,
(markdown) -> {
if (markdown != null)
{
if (!writerOptions.wrapChanged)
{
PanmirrorUIToolsSource sourceTools = new PanmirrorUITools().source;
TextChange[] changes = sourceTools.diffChars(code, markdown, 1);
completed.execute(new PanmirrorChanges(null, changes));
}
else
{
completed.execute(new PanmirrorChanges(markdown, null));
}
}
else
{
completed.execute(null);
}
});
});
}
/**
* Returns the width of the entire visual editor
*
* @return The visual editor's width.
*/
public int getPixelWidth()
{
return panmirror_.getOffsetWidth();
}
/**
* Returns the width of the content inside the visual editor
*
* @return Width of content.
*/
public int getContentWidth()
{
Element[] elements = DomUtils.getElementsByClassName(panmirror_.getElement(),
"pm-content");
if (elements.length < 1)
{
// if no root node, use the entire surface
return getPixelWidth();
}
return elements[0].getOffsetWidth();
}
public void manageCommands()
{
// hookup devtools
syncDevTools();
// disable commands
disableForVisualMode(
            // Disabled since it just opens the scope tree widget (which doesn't
// exist in visual mode)
commands_.jumpTo(),
// Disabled since diagnostics aren't active in visual mode
commands_.showDiagnosticsActiveDocument(),
// Disabled since we can't meaningfully select instances in several
// embedded editors simultaneously
commands_.findSelectAll(),
// Disabled since code folding doesn't work in embedded editors (there's
// no gutter in which to toggle folds)
commands_.fold(),
commands_.foldAll(),
commands_.unfold(),
commands_.unfoldAll(),
// Disabled since we don't have line numbers in the visual editor
commands_.goToLine()
);
// initially disable code commands (they will be re-enabled later when an
// editor has focus)
if (isActivated())
{
setCodeCommandsEnabled(false);
}
}
public void unmanageCommands()
{
restoreDisabledForVisualMode();
}
public void insertChunk(String chunkPlaceholder, int rowOffset, int colOffset)
{
panmirror_.insertChunk(chunkPlaceholder, rowOffset, colOffset);
}
/**
* Perform a command after synchronizing the selection state of the visual
* editor. Note that the command will not be performed unless focus is in a
* code editor (as otherwise we can't map selection 1-1).
*
* @param command
*/
public void performWithSelection(Command command)
{
// Drive focus to the editing surface. This is necessary so we correctly
// identify the active (focused) editor on which to perform the command.
panmirror_.focus();
// Perform the command in the active code editor, if any.
visualModeChunks_.performWithSelection(command);
}
public DocDisplay getActiveEditor()
{
return activeEditor_;
}
/**
* Sets the active (currently focused) code chunk editor.
*
* @param editor The current code chunk editor, or null if no code chunk
* editor has focus.
*/
public void setActiveEditor(DocDisplay editor)
{
activeEditor_ = editor;
if (editor != null)
{
// A code chunk has focus; enable code commands
setCodeCommandsEnabled(true);
}
}
/**
* Sets the enabled state for code commands -- i.e. those that require
* selection to be inside a chunk of code. We disable these outside code
* chunks.
*
* @param enabled Whether to enable code commands
*/
private void setCodeCommandsEnabled(boolean enabled)
{
AppCommand[] commands = {
commands_.commentUncomment(),
commands_.executeCode(),
commands_.executeCodeWithoutFocus(),
commands_.executeCodeWithoutMovingCursor(),
commands_.executeCurrentFunction(),
commands_.executeCurrentLine(),
commands_.executeCurrentParagraph(),
commands_.executeCurrentSection(),
commands_.executeCurrentStatement(),
commands_.executeFromCurrentLine(),
commands_.executeToCurrentLine(),
commands_.extractFunction(),
commands_.extractLocalVariable(),
commands_.goToDefinition(),
commands_.insertRoxygenSkeleton(),
commands_.profileCode(),
commands_.profileCodeWithoutFocus(),
commands_.reflowComment(),
commands_.reformatCode(),
commands_.reindent(),
commands_.renameInScope(),
commands_.runSelectionAsJob(),
commands_.runSelectionAsLauncherJob(),
commands_.sendToTerminal(),
};
for (AppCommand command : commands)
{
if (command.isVisible())
{
command.setEnabled(enabled);
}
}
}
public void goToNextSection()
{
panmirror_.execCommand(PanmirrorCommands.GoToNextSection);
}
public void goToPreviousSection()
{
panmirror_.execCommand(PanmirrorCommands.GoToPreviousSection);
}
public HasFindReplace getFindReplace()
{
if (panmirror_ != null) {
return panmirror_.getFindReplace();
} else {
return new HasFindReplace() {
public boolean isFindReplaceShowing() { return false; }
public void showFindReplace(boolean defaultForward) {}
public void hideFindReplace() {}
public void findNext() {}
public void findPrevious() {}
public void replaceAndFind() {}
};
}
}
public ToolbarButton getFindReplaceButton()
{
return findReplaceButton_;
}
public void checkSpelling()
{
visualModeSpelling_.checkSpelling(panmirror_.getSpellingDoc());
}
@Override
public void invalidateAllWords()
{
if (panmirror_ != null)
panmirror_.spellingInvalidateAllWords();
}
@Override
public void invalidateWord(String word)
{
if (panmirror_ != null)
panmirror_.spellingInvalidateWord(word);
}
@Override
public void onVisualModeSpellingAddToDictionary(VisualModeSpellingAddToDictionaryEvent event)
{
if (panmirror_ != null)
panmirror_.spellingInvalidateWord(event.getWord());
}
public boolean isVisualModePosition(SourcePosition position)
{
return visualModeNavigation_.isVisualModePosition(position);
}
public void navigate(SourcePosition position, boolean recordCurrentPosition)
{
visualModeNavigation_.navigate(position, recordCurrentPosition);
}
public void navigateToXRef(String xref, boolean recordCurrentPosition)
{
visualModeNavigation_.navigateToXRef(xref, recordCurrentPosition);
}
public void recordCurrentNavigationPosition()
{
visualModeNavigation_.recordCurrentNavigationPosition();
}
public SourcePosition getSourcePosition()
{
return visualModeNavigation_.getSourcePosition();
}
public boolean isAtRow(SourcePosition position)
{
if (visualModeNavigation_.isVisualModePosition(position))
{
return position.getRow() == getSourcePosition().getRow();
}
else
{
return false;
}
}
@Override
public String getYamlFrontMatter()
{
return panmirror_.getYamlFrontMatter();
}
@Override
public boolean applyYamlFrontMatter(String yaml)
{
panmirror_.applyYamlFrontMatter(yaml);
return true;
}
public void activateDevTools()
{
withPanmirror(() -> {
panmirror_.activateDevTools();
});
}
@Override
public void onSourceDocAdded(SourceDocAddedEvent e)
{
if (e.getDoc().getId() != docUpdateSentinel_.getId())
return;
// when interactively adding a visual mode doc, make sure we set the focus
// (special handling required b/c initialization of visual mode docs is
// async so can miss the normal setting of focus)
if (e.getMode() == Source.OPEN_INTERACTIVE && isActivated() && target_.isActiveDocument())
{
if (panmirror_ != null)
{
panmirror_.focus();
}
else if (isLoading_)
{
onReadyHandlers_.add(() -> panmirror_.focus());
}
}
}
public void onClosing()
{
if (syncOnIdle_ != null)
syncOnIdle_.suspend();
if (saveLocationOnIdle_ != null)
saveLocationOnIdle_.suspend();
}
public VisualModeChunk getChunkAtRow(int row)
{
return visualModeChunks_.getChunkAtRow(row);
}
public JsArray<ChunkDefinition> getChunkDefs()
{
return visualModeChunks_.getChunkDefs();
}
public ChunkDefinition getChunkDefAtRow(int row)
{
VisualModeChunk chunk = getChunkAtRow(row);
if (chunk == null)
return null;
return chunk.getDefinition();
}
@Override
public List<CommandPaletteItem> getCommandPaletteItems()
{
return panmirror_.getCommandPaletteItems();
}
public void focus(Command onComplete)
{
activate(() ->
{
panmirror_.focus();
if (onComplete != null)
{
onComplete.execute();
}
});
}
public void setChunkLineExecState(int start, int end, int state)
{
visualModeChunks_.setChunkLineExecState(start, end, state);
}
public void setChunkState(Scope chunk, int state)
{
visualModeChunks_.setChunkState(chunk, state);
}
public void onUserSwitchingToVisualMode()
{
visualModeConfirm_.onUserSwitchToVisualModePending();
}
public String getSelectedText()
{
return panmirror_.getSelectedText();
}
public void replaceSelection(String value)
{
panmirror_.replaceSelection(value);
}
private void manageUI(boolean activate, boolean focus)
{
manageUI(activate, focus, () -> {});
}
private void manageUI(boolean activate, boolean focus, ScheduledCommand completed)
{
// validate the activation
if (activate)
{
String invalid = validateActivation();
if (invalid != null)
{
deactivateWithMessage(invalid);
return;
}
}
// manage commands
manageCommands();
// manage toolbar buttons / menus in display
view_.manageCommandUI();
// get references to the editing container and it's source editor
TextEditorContainer editorContainer = view_.editorContainer();
// visual mode enabled (panmirror editor)
if (activate)
{
// set flag indicating that we are loading
isLoading_ = true;
// show progress (as this may well require either loading the
// panmirror library for the first time or a reload of visual mode,
// which is normally instant but for very, very large documents
// can take a couple of seconds)
progress_.beginProgressOperation(400);
editorContainer.activateWidget(progress_);
CommandWithArg<Boolean> done = (success) -> {
// clear progress
progress_.endProgressOperation();
if (success)
{
// sync to editor outline prefs
panmirror_.showOutline(establishOutlineVisible(), getOutlineWidth());
// show find replace button
findReplaceButton_.setVisible(true);
// activate widget
editorContainer.activateWidget(panmirror_, focus);
// begin save-on-idle behavior
syncOnIdle_.resume();
saveLocationOnIdle_.resume();
// (re)inject notebook output from the editor
target_.getNotebook().migrateCodeModeOutput();
// execute completed hook
Scheduler.get().scheduleDeferred(completed);
// clear loading flag and execute any onReady handlers
isLoading_ = false;
onReadyHandlers_.forEach(handler -> { Scheduler.get().scheduleDeferred(handler); });
onReadyHandlers_.clear();
}
else
{
editorContainer.activateEditor(focus);
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
}
};
withPanmirror(() -> {
// if we aren't currently active then set our markdown based
// on what's currently in the source editor
if (!isVisualEditorActive())
{
syncFromEditor(done, focus);
}
else
{
done.execute(true);
}
});
}
// visual mode not enabled (source editor)
else
{
Command activateSourceEditor = () -> {
unmanageCommands();
// hide find replace button
findReplaceButton_.setVisible(false);
editorContainer.activateEditor(focus);
if (syncOnIdle_ != null)
syncOnIdle_.suspend();
if (saveLocationOnIdle_ != null)
saveLocationOnIdle_.suspend();
// move notebook outputs from visual mode
target_.getNotebook().migrateVisualModeOutput();
// execute completed hook
Scheduler.get().scheduleDeferred(completed);
};
// if we are deactivating to allow the user to edit invalid source code then don't sync
// back to the source editor (as this would have happened b/c we inspected the contents
// of the source editor in syncFromEditorIfActivated() and decided we couldn't edit it)
if (deactivatingForInvalidSource_)
{
deactivatingForInvalidSource_ = false;
activateSourceEditor.execute();
}
else
{
syncToEditor(SyncType.SyncTypeActivate, activateSourceEditor);
}
}
}
private void markDirty()
{
dirtyState_.markDirty(true);
source_.setSourceDocumentDirty(
docUpdateSentinel_.getId(), true,
new VoidServerRequestCallback());
}
private TextEditorContainer.Changes toEditorChanges(PanmirrorCode panmirrorCode)
{
// code to diff
String fromCode = getEditorCode();
String toCode = panmirrorCode.code;
// do the diff (timeout after 1 second). note that we only do this
// once the user has stopped typing for 1 second so it's not something
// that will run continuously during editing (in which case a much
// lower timeout would be warranted). note also that timeouts are for
// the diff planning phase so we will still get a valid diff back
// even if the timeout occurs.
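// Illustrative example (an assumption, not taken from this codebase): if
// fromCode is "# Title\n\ntext" and toCode is "# Title\n\ntext here", the
// character diff should come back as a small insertion-only TextChange rather
// than a full-document replacement, which keeps the edit applied to the source
// editor (and its undo history) minimal. The trailing '1' in the call below is
// the timeout, in seconds, described in the comment above.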
PanmirrorUIToolsSource sourceTools = new PanmirrorUITools().source;
TextChange[] changes = sourceTools.diffChars(fromCode, toCode, 1);
// return changes w/ cursor
return new TextEditorContainer.Changes(
changes,
panmirrorCode.selection_only
? new TextEditorContainer.Navigator()
{
@Override
public void onNavigate(DocDisplay docDisplay)
{
visualModeLocation_.setSourceOutlineLocation(panmirrorCode.location);
}
}
: null
);
}
private void syncDevTools()
{
if (panmirror_ != null && panmirror_.devToolsLoaded())
panmirror_.activateDevTools();
}
private void withPanmirror(Command ready)
{
if (panmirror_ == null)
{
// create panmirror (no progress b/c we already have pane progress)
PanmirrorContext context = createPanmirrorContext();
PanmirrorOptions options = panmirrorOptions();
PanmirrorWidget.Options widgetOptions = new PanmirrorWidget.Options();
PanmirrorWidget.create(context, visualModeFormat_.formatSource(),
options, widgetOptions, kCreationProgressDelayMs, (panmirror) -> {
// save reference to panmirror
panmirror_ = panmirror;
// track format comment (used to detect when we need to reload for a new format)
panmirrorFormatConfig_ = new VisualModeReloadChecker(view_);
// remove some keybindings that conflict with the ide
// (currently no known conflicts)
disableKeys();
// periodically sync edits back to main editor
syncOnIdle_ = new DebouncedCommand(1000)
{
@Override
protected void execute()
{
if (isDirty_ && !panmirror_.isInitialDoc())
syncToEditor(SyncType.SyncTypeNormal);
}
};
// periodically save selection
saveLocationOnIdle_ = new DebouncedCommand(1000)
{
@Override
protected void execute()
{
visualModeLocation_.saveEditingLocation(panmirror_.getEditingLocation());
}
};
// set dirty flag + nudge idle sync on change
panmirror_.addPanmirrorUpdatedHandler(new PanmirrorUpdatedEvent.Handler()
{
@Override
public void onPanmirrorUpdated(PanmirrorUpdatedEvent event)
{
// set flag and nudge sync on idle
isDirty_ = true;
syncOnIdle_.nudge();
// update editor dirty state if necessary
if (!loadingFromSource_ && !dirtyState_.getValue())
markDirty();
}
});
// save selection
panmirror_.addPanmirrorStateChangeHandler(new PanmirrorStateChangeEvent.Handler()
{
@Override
public void onPanmirrorStateChange(PanmirrorStateChangeEvent event)
{
saveLocationOnIdle_.nudge();
}
});
// forward navigation event
panmirror_.addPanmirrorNavigationHandler(new PanmirrorNavigationEvent.Handler()
{
@Override
public void onPanmirrorNavigation(PanmirrorNavigationEvent event)
{
visualModeNavigation_.onNavigated(event.getNavigation());
}
});
// check for external edit on focus
panmirror_.addPanmirrorFocusHandler(new PanmirrorFocusEvent.Handler()
{
@Override
public void onPanmirrorFocus(PanmirrorFocusEvent event)
{
target_.checkForExternalEdit(100);
// Disable code-related commands, on the presumption that we
// are in a prose region of the document. These commands will
// be re-enabled shortly if focus is sent to a code chunk, and
// will remain disabled if we aren't.
//
// Note that the PanmirrorFocusEvent is fired when selection
// exits a code chunk as well as when the entire widget loses
// focus.
setCodeCommandsEnabled(false);
// Also clear the last focused Ace editor. This is normally
// used by addins which need to target the 'active' editor,
// with the 'active' state persisting after other UI elements
// (e.g. the Addins toolbar) have been clicked. However, if
// focus has been moved to a new editor context, then we instead
// want to clear that state.
AceEditor.clearLastFocusedEditor();
}
});
// track changes in outline sidebar and save as prefs
panmirror_.addPanmirrorOutlineVisibleHandler((event) -> {
setOutlineVisible(event.getVisible());
});
panmirror_.addPanmirrorOutlineWidthHandler((event) -> {
setOutlineWidth(event.getWidth());
});
// manage latch state of findreplace button
panmirror_.addPanmirrorFindReplaceVisibleHandler((event) -> {
findReplaceButton_.setLeftImage(event.getVisible()
? FindReplaceBar.getFindLatchedIcon()
: FindReplaceBar.getFindIcon());
});
// good to go!
ready.execute();
});
}
else
{
// panmirror already created
ready.execute();
}
}
private PanmirrorContext createPanmirrorContext()
{
PanmirrorUIDisplay.ShowContextMenu showContextMenu = (commands, clientX, clientY) -> {
return panmirror_.showContextMenu(commands, clientX, clientY);
};
return visualModeContext_.createContext(showContextMenu);
}
private String getEditorCode()
{
return VisualModeUtil.getEditorCode(view_);
}
private TextEditorContainer.Editor getSourceEditor()
{
return view_.editorContainer().getEditor();
}
private boolean establishOutlineVisible()
{
return target_.establishPreferredOutlineWidgetVisibility(
prefs_.visualMarkdownEditingShowDocOutline().getValue()
);
}
private boolean getOutlineVisible()
{
return target_.getPreferredOutlineWidgetVisibility(
prefs_.visualMarkdownEditingShowDocOutline().getValue()
);
}
private void setOutlineVisible(boolean visible)
{
target_.setPreferredOutlineWidgetVisibility(visible);
}
private double getOutlineWidth()
{
return target_.getPreferredOutlineWidgetSize();
}
private void setOutlineWidth(double width)
{
target_.setPreferredOutlineWidgetSize(width);
}
private void disableKeys(String... commands)
{
PanmirrorKeybindings keybindings = disabledKeybindings(commands);
panmirror_.setKeybindings(keybindings);
}
private PanmirrorKeybindings disabledKeybindings(String... commands)
{
PanmirrorKeybindings keybindings = new PanmirrorKeybindings();
for (String command : commands)
keybindings.add(command, new String[0]);
return keybindings;
}
private void disableForVisualMode(AppCommand... commands)
{
if (isActivated())
{
for (AppCommand command : commands)
{
if (command.isVisible() && command.isEnabled())
{
command.setEnabled(false);
if (!disabledForVisualMode_.contains(command))
disabledForVisualMode_.add(command);
}
}
}
}
private void restoreDisabledForVisualMode()
{
disabledForVisualMode_.forEach((command) -> {
command.setEnabled(true);
});
disabledForVisualMode_.clear();
}
private HandlerRegistration onDocPropChanged(String prop, ValueChangeHandler<String> handler)
{
return docUpdateSentinel_.addPropertyValueChangeHandler(prop, handler);
}
private VisualModeNavigation.Context navigationContext_ = new VisualModeNavigation.Context() {
@Override
public String getId()
{
return docUpdateSentinel_.getId();
}
@Override
public String getPath()
{
return docUpdateSentinel_.getPath();
}
@Override
public PanmirrorWidget panmirror()
{
return panmirror_;
}
};
private PanmirrorOptions panmirrorOptions()
{
// create options
PanmirrorOptions options = new PanmirrorOptions();
// use embedded codemirror for code blocks
options.codeEditor = prefs_.visualMarkdownCodeEditor().getValue();
// highlight rmd example chunks
options.rmdExampleHighlight = true;
// add focus-visible class to prevent interaction with focus-visible.js
// (it ends up attempting to apply the "focus-visible" class b/c ProseMirror
// is contentEditable, and that triggers a dom mutation event for ProseMirror,
// which in turn causes us to lose table selections)
options.className = "focus-visible";
return options;
}
private String validateActivation()
{
if (this.docDisplay_.hasActiveCollabSession())
{
return "You cannot enter visual mode while using realtime collaboration.";
}
else if (BrowseCap.isInternetExplorer())
{
return "Visual mode is not supported in Internet Explorer.";
}
else
{
return visualModeFormat_.validateSourceForVisualMode();
}
}
private void deactivateForInvalidSource(String invalid)
{
deactivatingForInvalidSource_ = true;
deactivateWithMessage(invalid);
}
private void deactivateWithMessage(String message)
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
view_.showWarningBar(message);
}
/**
* Align the document's scope tree with the code chunks in visual mode.
*
* @param location Array of outline locations from visual mode
*/
private void alignScopeOutline(PanmirrorEditingOutlineLocation location)
{
// Get all of the chunks from the document (code view)
ArrayList<Scope> chunkScopes = new ArrayList<Scope>();
ScopeList chunks = new ScopeList(docDisplay_);
chunks.selectAll(ScopeList.CHUNK);
for (Scope chunk : chunks)
{
chunkScopes.add(chunk);
}
// Get all of the chunks from the outline emitted by visual mode
ArrayList<PanmirrorEditingOutlineLocationItem> chunkItems =
new ArrayList<PanmirrorEditingOutlineLocationItem>();
for (int j = 0; j < location.items.length; j++)
{
if (location.items[j].type == PanmirrorOutlineItemType.RmdChunk)
{
chunkItems.add(location.items[j]);
}
}
// Refuse to proceed if cardinality doesn't match (consider: does this
// need to account for deeply nested chunks that might appear in one
// outline but not the other?)
if (chunkScopes.size() != chunkItems.size())
{
Debug.logWarning(chunkScopes.size() + " chunks in scope tree, but " +
chunkItems.size() + " chunks in visual editor.");
return;
}
for (int k = 0; k < chunkItems.size(); k++)
{
PanmirrorEditingOutlineLocationItem visualItem =
Js.uncheckedCast(chunkItems.get(k));
VisualModeChunk chunk = visualModeChunks_.getChunkAtVisualPosition(
visualItem.position);
if (chunk == null)
{
// This is normal; it is possible that we haven't created a chunk
// editor at this position yet.
continue;
}
chunk.setScope(chunkScopes.get(k));
}
}
private Commands commands_;
private UserPrefs prefs_;
private SourceServerOperations source_;
private DocDisplay activeEditor_; // the current embedded editor
private final TextEditingTarget target_;
private final TextEditingTarget.Display view_;
private final DocDisplay docDisplay_; // the parent editor
private final DirtyState dirtyState_;
private final DocUpdateSentinel docUpdateSentinel_;
private final VisualModePanmirrorFormat visualModeFormat_;
private final VisualModeChunks visualModeChunks_;
private final VisualModePanmirrorContext visualModeContext_;
private final VisualModeEditingLocation visualModeLocation_;
private final VisualModeMarkdownWriter visualModeWriterOptions_;
private final VisualModeNavigation visualModeNavigation_;
private final VisualModeConfirm visualModeConfirm_;
private final VisualModeSpelling visualModeSpelling_;
private VisualModeReloadChecker panmirrorFormatConfig_;
private DebouncedCommand syncOnIdle_;
private DebouncedCommand saveLocationOnIdle_;
private boolean isDirty_ = false;
private boolean loadingFromSource_ = false;
private boolean deactivatingForInvalidSource_ = false;
private PanmirrorWidget panmirror_;
private ToolbarButton findReplaceButton_;
private ArrayList<AppCommand> disabledForVisualMode_ = new ArrayList<AppCommand>();
private final ProgressPanel progress_;
private SerializedCommandQueue syncToEditorQueue_ = new SerializedCommandQueue();
private boolean isLoading_ = false;
private List<ScheduledCommand> onReadyHandlers_ = new ArrayList<ScheduledCommand>();
private static final int kCreationProgressDelayMs = 0;
private static final int kSerializationProgressDelayMs = 5000;
// priority task queue for expensive calls to panmirror_.setMarkdown
// (currently active tab bumps itself up in priority)
private static PreemptiveTaskQueue setMarkdownQueue_ = new PreemptiveTaskQueue(true, false);
}
| src/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/visualmode/VisualMode.java | /*
* VisualMode.java
*
* Copyright (C) 2020 by RStudio, PBC
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.views.source.editors.text.visualmode;
import java.util.ArrayList;
import java.util.List;
import org.rstudio.core.client.BrowseCap;
import org.rstudio.core.client.CommandWithArg;
import org.rstudio.core.client.DebouncedCommand;
import org.rstudio.core.client.Debug;
import org.rstudio.core.client.PreemptiveTaskQueue;
import org.rstudio.core.client.Rendezvous;
import org.rstudio.core.client.SerializedCommand;
import org.rstudio.core.client.SerializedCommandQueue;
import org.rstudio.core.client.command.AppCommand;
import org.rstudio.core.client.dom.DomUtils;
import org.rstudio.core.client.patch.TextChange;
import org.rstudio.core.client.widget.HasFindReplace;
import org.rstudio.core.client.widget.ProgressPanel;
import org.rstudio.core.client.widget.ToolbarButton;
import org.rstudio.core.client.widget.images.ProgressImages;
import org.rstudio.studio.client.RStudioGinjector;
import org.rstudio.studio.client.application.events.EventBus;
import org.rstudio.studio.client.common.Value;
import org.rstudio.studio.client.palette.model.CommandPaletteEntrySource;
import org.rstudio.studio.client.palette.model.CommandPaletteItem;
import org.rstudio.studio.client.panmirror.PanmirrorChanges;
import org.rstudio.studio.client.panmirror.PanmirrorCode;
import org.rstudio.studio.client.panmirror.PanmirrorContext;
import org.rstudio.studio.client.panmirror.PanmirrorKeybindings;
import org.rstudio.studio.client.panmirror.PanmirrorOptions;
import org.rstudio.studio.client.panmirror.PanmirrorSetMarkdownResult;
import org.rstudio.studio.client.panmirror.PanmirrorWidget;
import org.rstudio.studio.client.panmirror.command.PanmirrorCommands;
import org.rstudio.studio.client.panmirror.events.PanmirrorFocusEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorNavigationEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorStateChangeEvent;
import org.rstudio.studio.client.panmirror.events.PanmirrorUpdatedEvent;
import org.rstudio.studio.client.panmirror.location.PanmirrorEditingOutlineLocation;
import org.rstudio.studio.client.panmirror.location.PanmirrorEditingOutlineLocationItem;
import org.rstudio.studio.client.panmirror.outline.PanmirrorOutlineItemType;
import org.rstudio.studio.client.panmirror.pandoc.PanmirrorPandocFormat;
import org.rstudio.studio.client.panmirror.ui.PanmirrorUIDisplay;
import org.rstudio.studio.client.panmirror.uitools.PanmirrorUITools;
import org.rstudio.studio.client.panmirror.uitools.PanmirrorUIToolsSource;
import org.rstudio.studio.client.server.VoidServerRequestCallback;
import org.rstudio.studio.client.workbench.commands.Commands;
import org.rstudio.studio.client.workbench.prefs.model.UserPrefs;
import org.rstudio.studio.client.workbench.views.source.Source;
import org.rstudio.studio.client.workbench.views.source.editors.text.AceEditor;
import org.rstudio.studio.client.workbench.views.source.editors.text.DocDisplay;
import org.rstudio.studio.client.workbench.views.source.editors.text.Scope;
import org.rstudio.studio.client.workbench.views.source.editors.text.ScopeList;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTarget;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditingTargetRMarkdownHelper;
import org.rstudio.studio.client.workbench.views.source.editors.text.TextEditorContainer;
import org.rstudio.studio.client.workbench.views.source.editors.text.findreplace.FindReplaceBar;
import org.rstudio.studio.client.workbench.views.source.editors.text.rmd.ChunkDefinition;
import org.rstudio.studio.client.workbench.views.source.editors.text.visualmode.events.VisualModeSpellingAddToDictionaryEvent;
import org.rstudio.studio.client.workbench.views.source.events.SourceDocAddedEvent;
import org.rstudio.studio.client.workbench.views.source.model.DirtyState;
import org.rstudio.studio.client.workbench.views.source.model.DocUpdateSentinel;
import org.rstudio.studio.client.workbench.views.source.model.SourcePosition;
import org.rstudio.studio.client.workbench.views.source.model.SourceServerOperations;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.Command;
import com.google.inject.Inject;
import elemental2.core.JsObject;
import jsinterop.base.Js;
public class VisualMode implements VisualModeEditorSync,
CommandPaletteEntrySource,
SourceDocAddedEvent.Handler,
VisualModeSpelling.Context,
VisualModeConfirm.Context,
VisualModeSpellingAddToDictionaryEvent.Handler
{
public VisualMode(TextEditingTarget target,
TextEditingTarget.Display view,
TextEditingTargetRMarkdownHelper rmarkdownHelper,
DocDisplay docDisplay,
DirtyState dirtyState,
DocUpdateSentinel docUpdateSentinel,
EventBus eventBus,
final ArrayList<HandlerRegistration> releaseOnDismiss)
{
RStudioGinjector.INSTANCE.injectMembers(this);
target_ = target;
view_ = view;
docDisplay_ = docDisplay;
dirtyState_ = dirtyState;
docUpdateSentinel_ = docUpdateSentinel;
progress_ = new ProgressPanel(ProgressImages.createSmall(), 200);
// create peer helpers
visualModeFormat_ = new VisualModePanmirrorFormat(docUpdateSentinel_, docDisplay_, target_, view_);
visualModeChunks_ = new VisualModeChunks(docUpdateSentinel_, docDisplay_, target_, this);
visualModeLocation_ = new VisualModeEditingLocation(docUpdateSentinel_, docDisplay_);
visualModeWriterOptions_ = new VisualModeMarkdownWriter(docUpdateSentinel_, visualModeFormat_);
visualModeNavigation_ = new VisualModeNavigation(navigationContext_);
visualModeConfirm_ = new VisualModeConfirm(docUpdateSentinel_, docDisplay, this);
visualModeSpelling_ = new VisualModeSpelling(docUpdateSentinel_, docDisplay, this);
visualModeContext_ = new VisualModePanmirrorContext(
docUpdateSentinel_,
target_,
visualModeChunks_,
visualModeFormat_,
visualModeSpelling_
);
// create widgets that the rest of startup (e.g. manageUI) may rely on
initWidgets();
// subscribe to source doc added
releaseOnDismiss.add(eventBus.addHandler(SourceDocAddedEvent.TYPE, this));
// subscribe to spelling invalidation event
releaseOnDismiss.add(eventBus.addHandler(VisualModeSpellingAddToDictionaryEvent.TYPE, this));
// manage UI (then track changes over time)
manageUI(isActivated(), false);
releaseOnDismiss.add(onDocPropChanged(TextEditingTarget.RMD_VISUAL_MODE, (value) -> {
manageUI(isActivated(), true);
}));
// sync to outline visible prop
releaseOnDismiss.add(onDocPropChanged(TextEditingTarget.DOC_OUTLINE_VISIBLE, (value) -> {
withPanmirror(() -> {
panmirror_.showOutline(getOutlineVisible(), getOutlineWidth(), true);
});
}));
}
/**
* Classification of synchronization types from the visual editor to the code
* editor.
*/
public enum SyncType
{
// A normal synchronization (usually performed on idle)
SyncTypeNormal,
// A synchronization performed prior to executing code
SyncTypeExecution,
// A synchronization performed in order to activate the code editor
SyncTypeActivate
}
@Inject
public void initialize(Commands commands,
UserPrefs prefs,
SourceServerOperations source)
{
commands_ = commands;
prefs_ = prefs;
source_ = source;
}
public void onDismiss()
{
}
private void initWidgets()
{
findReplaceButton_ = new ToolbarButton(
ToolbarButton.NoText,
"Find/Replace",
FindReplaceBar.getFindIcon(),
(event) -> {
HasFindReplace findReplace = getFindReplace();
findReplace.showFindReplace(!findReplace.isFindReplaceShowing());
}
);
}
public boolean isActivated()
{
return docUpdateSentinel_.getBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
}
public boolean isVisualEditorActive()
{
return view_.editorContainer().isWidgetActive(panmirror_);
}
public void activate(ScheduledCommand completed)
{
if (!isActivated())
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, true);
manageUI(true, true, completed);
}
else if (isLoading_)
{
onReadyHandlers_.add(completed);
}
else
{
completed.execute();
}
}
public void deactivate(ScheduledCommand completed)
{
if (isActivated())
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
manageUI(false, true, completed);
}
else
{
completed.execute();
}
}
@Override
public void syncToEditor(SyncType syncType)
{
syncToEditor(syncType, null);
}
@Override
public void syncToEditor(SyncType syncType, Command ready)
{
// This is an asynchronous task that we want to behave in a mostly FIFO
// way when overlapping calls to syncToEditor are made.
// Each syncToEditor operation can be thought of as taking place in three
// phases:
//
// 1 - Synchronously gathering state from panmirror, and kicking off the
// async pandoc operation
// 2 - The pandoc operation itself--this happens completely off the UI
// thread (in a different process in fact)
// 3 - With the result from pandoc, do some synchronous processing, sync
// the source editor, and invoke the `ready` parameter
//
// Part 2 is a "pure" operation so it doesn't matter when it runs. What
// matters is that phase 1 gathers state at the moment it's called, and
// if there are multiple operations in progress simultaneously, that the
// order in which different phase 3's are invoked reflect the order the
// operations were started. For example, if syncToEditor was called once
// (A) and then again (B), any of these sequences are fine:
// A1->A2->A3->B1->B2->B3
// A1->B1->A2->B2->A3->B3
// or even
// A1->B1->B2->A2->A3->B3
// but NOT
// A1->A2->B1->B2->B3->A3
//
// because if A1 comes before B1, then A3 must come before B3.
// Our plan of execution is:
// 1. Start the async operation
// 2a. Wait for the async operation to finish
// 2b. Wait for all preceding async operations to finish
// 3. Run our phase 3 logic and ready.execute()
// 4. Signal to the next succeeding async operation (if any) that we're
// done
// We use syncToEditorQueue_ to enforce the FIFO ordering. Because we
// don't know whether the syncToEditorQueue_ or the pandoc operation will
// finish first, we use a Rendezvous object to make sure both conditions
// are satisfied before we proceed.
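// Rough analogy (not the actual Rendezvous implementation -- its API is only
// visible here via the constructor and arrive() calls below): conceptually it
// acts like a two-party latch on the single GWT UI thread. A simplified sketch
// of the same idea in plain Java might look like:
//
// AtomicInteger pending = new AtomicInteger(2);
// Runnable onBothArrived = () -> { /* phase 3 work, then continue the queue */ };
// Runnable arrive = () -> { if (pending.decrementAndGet() == 0) onBothArrived.run(); };
//
// with arrive.run() invoked once when our turn in syncToEditorQueue_ comes up
// and once when the pandoc result returns; only after both does phase 3 run.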
Rendezvous rv = new Rendezvous(2);
syncToEditorQueue_.addCommand(new SerializedCommand() {
@Override
public void onExecute(Command continuation)
{
// We pass false to arrive() because it's important to not invoke
// the continuation before our phase 3 work has completed; the whole
// point is to enforce ordering of phase 3.
rv.arrive(() -> {
continuation.execute();
}, false);
}
});
if (isVisualEditorActive() && (syncType == SyncType.SyncTypeActivate || isDirty_)) {
// set flags
isDirty_ = false;
withPanmirror(() -> {
VisualModeMarkdownWriter.Options writerOptions =
visualModeWriterOptions_.optionsFromConfig(panmirror_.getPandocFormatConfig(true));
panmirror_.getMarkdown(writerOptions.options, kSerializationProgressDelayMs,
new CommandWithArg<JsObject>() {
@Override
public void execute(JsObject obj)
{
PanmirrorCode markdown = Js.uncheckedCast(obj);
rv.arrive(() ->
{
if (markdown == null)
{
// note that ready.execute() is never called in the error case
return;
}
// we are about to mutate the document, so create a single
// shot handler that will adjust the known position of
// items in the outline (we do this opportunistically
// unless executing code)
if (markdown.location != null && syncType != SyncType.SyncTypeExecution)
{
final Value<HandlerRegistration> handler = new Value<HandlerRegistration>(null);
handler.setValue(docDisplay_.addScopeTreeReadyHandler((evt) ->
{
alignScopeOutline(markdown.location);
handler.getValue().removeHandler();
}));
}
// apply diffs unless the wrap column changed (too expensive)
if (!writerOptions.wrapChanged)
{
TextEditorContainer.Changes changes = toEditorChanges(markdown);
getSourceEditor().applyChanges(changes, syncType == SyncType.SyncTypeActivate);
}
else
{
getSourceEditor().setCode(markdown.code);
}
// if the format comment has changed then show the reload prompt
if (panmirrorFormatConfig_.requiresReload())
{
view_.showPanmirrorFormatChanged(() ->
{
// dismiss the warning bar
view_.hideWarningBar();
// this will trigger the refresh b/c the format changed
syncFromEditorIfActivated();
});
}
if (markdown.location != null && syncType == SyncType.SyncTypeExecution)
{
// if syncing for execution, force a rebuild of the scope tree
alignScopeOutline(markdown.location);
}
// invoke ready callback if supplied
if (ready != null)
{
ready.execute();
}
}, true);
}
});
});
} else {
// Even if ready is null, it's important to arrive() so the
// syncToEditorQueue knows it can continue
rv.arrive(() ->
{
if (ready != null) {
ready.execute();
}
}, true);
}
}
@Override
public void syncFromEditorIfActivated()
{
if (isActivated())
{
// new editor content about to be sent to prosemirror, validate that we can edit it
String invalid = validateActivation();
if (invalid != null)
{
deactivateForInvalidSource(invalid);
return;
}
// get reference to the editing container
TextEditorContainer editorContainer = view_.editorContainer();
// show progress
progress_.beginProgressOperation(400);
editorContainer.activateWidget(progress_);
syncFromEditor((success) -> {
// clear progress
progress_.endProgressOperation();
// re-activate panmirror widget
editorContainer.activateWidget(panmirror_, false);
}, false);
}
}
@Override
public void syncFromEditor(final CommandWithArg<Boolean> done, boolean focus)
{
// flag to prevent the document being set to dirty when loading
// from source mode
loadingFromSource_ = true;
// if there is a previous format comment and it's changed then
// we need to tear down the editor instance and create a new one
if (panmirrorFormatConfig_ != null && panmirrorFormatConfig_.requiresReload())
{
panmirrorFormatConfig_ = null;
view_.editorContainer().removeWidget(panmirror_);
panmirror_ = null;
}
withPanmirror(() -> {
final String editorCode = getEditorCode();
final VisualModeMarkdownWriter.Options writerOptions = visualModeWriterOptions_.optionsFromCode(editorCode);
// serialize these calls (they are expensive on both the server side for the call(s)
// to pandoc, and on the client side for initialization of the editor (esp. ace editors))
setMarkdownQueue_.addTask(new PreemptiveTaskQueue.Task()
{
@Override
public String getLabel()
{
return target_.getTitle();
}
@Override
public boolean shouldPreempt()
{
return target_.isActiveDocument();
}
@Override
public void execute(final Command taskDone)
{
// join done commands
final CommandWithArg<Boolean> allDone = (result) -> {
taskDone.execute();
if (done != null)
done.execute(result);
};
panmirror_.setMarkdown(editorCode, writerOptions.options, true, kCreationProgressDelayMs,
new CommandWithArg<JsObject>() {
@Override
public void execute(JsObject obj)
{
// get result
PanmirrorSetMarkdownResult result = Js.uncheckedCast(obj);
// update flags
isDirty_ = false;
loadingFromSource_ = false;
// bail on error
if (result == null)
{
allDone.execute(false);
return;
}
// show warning and terminate if there was unparsed metadata. note that the other
// option here would be to have setMarkdown send the unparsed metadata back to the
// server to generate yaml, and then include the metadata as yaml at the end of the
// document. this could be done using the method outlined here:
// https://github.com/jgm/pandoc/issues/2019
// specifically using this template:
/*
$if(titleblock)$
$titleblock$
$else$
--- {}
$endif$
*/
// ...with this command line:
/*
pandoc -t markdown --template=yaml.template foo.md
*/
if (JsObject.keys(result.unparsed_meta).length > 0)
{
view_.showWarningBar("Unable to activate visual mode (unsupported front matter format or non top-level YAML block)");
allDone.execute(false);
return;
}
// clear progress (for possible dialog overlays created by confirmation)
progress_.endProgressOperation();
// confirm if necessary
visualModeConfirm_.withSwitchConfirmation(
// allow inspection of result
result,
// onConfirmed
() -> {
// if pandoc's view of the document doesn't match the editor's we
// need to reset the editor's code (for both dirty state and
// so that diffs are efficient)
if (result.canonical != editorCode)
{
getSourceEditor().setCode(result.canonical);
markDirty();
}
// completed
allDone.execute(true);
// deferred actions
Scheduler.get().scheduleDeferred(() -> {
// if we are being focused it means we are switching from source mode, in that
// case sync our editing location to what it is in source
if (focus)
{
panmirror_.focus();
panmirror_.setEditingLocation(
visualModeLocation_.getSourceOutlineLocation(),
visualModeLocation_.savedEditingLocation()
);
}
// show any warnings
PanmirrorPandocFormat format = panmirror_.getPandocFormat();
if (result.unrecognized.length > 0)
{
view_.showWarningBar("Unrecognized Pandoc token(s); " + String.join(", ", result.unrecognized));
}
else if (format.warnings.invalidFormat.length() > 0)
{
view_.showWarningBar("Invalid Pandoc format: " + format.warnings.invalidFormat);
}
else if (format.warnings.invalidOptions.length > 0)
{
view_.showWarningBar("Unsupported extensions for markdown mode: " + String.join(", ", format.warnings.invalidOptions));;
}
});
},
// onCancelled
() -> {
allDone.execute(false);
}
);
}
});
}
});
});
}
public boolean canWriteCanonical()
{
return validateActivation() == null;
}
public void getCanonicalChanges(String code, CommandWithArg<PanmirrorChanges> completed)
{
withPanmirror(() -> {
VisualModeMarkdownWriter.Options writerOptions = visualModeWriterOptions_.optionsFromCode(code);
panmirror_.getCanonical(code, writerOptions.options, kSerializationProgressDelayMs,
(markdown) -> {
if (markdown != null)
{
if (!writerOptions.wrapChanged)
{
PanmirrorUIToolsSource sourceTools = new PanmirrorUITools().source;
TextChange[] changes = sourceTools.diffChars(code, markdown, 1);
completed.execute(new PanmirrorChanges(null, changes));
}
else
{
completed.execute(new PanmirrorChanges(markdown, null));
}
}
else
{
completed.execute(null);
}
});
});
}
/**
* Returns the width of the entire visual editor
*
* @return The visual editor's width.
*/
public int getPixelWidth()
{
return panmirror_.getOffsetWidth();
}
/**
* Returns the width of the content inside the visual editor
*
* @return Width of content.
*/
public int getContentWidth()
{
Element[] elements = DomUtils.getElementsByClassName(panmirror_.getElement(),
"pm-content");
if (elements.length < 1)
{
// if no root node, use the entire surface
return getPixelWidth();
}
return elements[0].getOffsetWidth();
}
public void manageCommands()
{
// hookup devtools
syncDevTools();
// disable commands
disableForVisualMode(
// Disabled since it just opens the scope tree widget (which doesn't
// exist in visual mode)
commands_.jumpTo(),
// Disabled since diagnostics aren't active in visual mode
commands_.showDiagnosticsActiveDocument(),
// Disabled since we can't meaningfully select instances in several
// embedded editors simultaneously
commands_.findSelectAll(),
// Disabled since code folding doesn't work in embedded editors (there's
// no gutter in which to toggle folds)
commands_.fold(),
commands_.foldAll(),
commands_.unfold(),
commands_.unfoldAll(),
// Disabled since we don't have line numbers in the visual editor
commands_.goToLine()
);
// initially disable code commands (they will be re-enabled later when an
// editor has focus)
setCodeCommandsEnabled(false);
}
public void unmanageCommands()
{
restoreDisabledForVisualMode();
}
public void insertChunk(String chunkPlaceholder, int rowOffset, int colOffset)
{
panmirror_.insertChunk(chunkPlaceholder, rowOffset, colOffset);
}
/**
* Perform a command after synchronizing the selection state of the visual
* editor. Note that the command will not be performed unless focus is in a
* code editor (as otherwise we can't map selection 1-1).
*
* @param command
*/
public void performWithSelection(Command command)
{
// Drive focus to the editing surface. This is necessary so we correctly
// identify the active (focused) editor on which to perform the command.
panmirror_.focus();
// Perform the command in the active code editor, if any.
visualModeChunks_.performWithSelection(command);
}
public DocDisplay getActiveEditor()
{
return activeEditor_;
}
/**
* Sets the active (currently focused) code chunk editor.
*
* @param editor The current code chunk editor, or null if no code chunk
* editor has focus.
*/
public void setActiveEditor(DocDisplay editor)
{
activeEditor_ = editor;
if (editor != null)
{
// A code chunk has focus; enable code commands
setCodeCommandsEnabled(true);
}
}
/**
* Sets the enabled state for code commands -- i.e. those that require
* selection to be inside a chunk of code. We disable these outside code
* chunks.
*
* @param enabled Whether to enable code commands
*/
private void setCodeCommandsEnabled(boolean enabled)
{
AppCommand[] commands = {
commands_.commentUncomment(),
commands_.executeCode(),
commands_.executeCodeWithoutFocus(),
commands_.executeCodeWithoutMovingCursor(),
commands_.executeCurrentFunction(),
commands_.executeCurrentLine(),
commands_.executeCurrentParagraph(),
commands_.executeCurrentSection(),
commands_.executeCurrentStatement(),
commands_.executeFromCurrentLine(),
commands_.executeToCurrentLine(),
commands_.extractFunction(),
commands_.extractLocalVariable(),
commands_.goToDefinition(),
commands_.insertRoxygenSkeleton(),
commands_.profileCode(),
commands_.profileCodeWithoutFocus(),
commands_.reflowComment(),
commands_.reformatCode(),
commands_.reindent(),
commands_.renameInScope(),
commands_.runSelectionAsJob(),
commands_.runSelectionAsLauncherJob(),
commands_.sendToTerminal(),
};
for (AppCommand command : commands)
{
if (command.isVisible())
{
command.setEnabled(enabled);
}
}
}
public void goToNextSection()
{
panmirror_.execCommand(PanmirrorCommands.GoToNextSection);
}
public void goToPreviousSection()
{
panmirror_.execCommand(PanmirrorCommands.GoToPreviousSection);
}
public HasFindReplace getFindReplace()
{
if (panmirror_ != null) {
return panmirror_.getFindReplace();
} else {
return new HasFindReplace() {
public boolean isFindReplaceShowing() { return false; }
public void showFindReplace(boolean defaultForward) {}
public void hideFindReplace() {}
public void findNext() {}
public void findPrevious() {}
public void replaceAndFind() {}
};
}
}
public ToolbarButton getFindReplaceButton()
{
return findReplaceButton_;
}
public void checkSpelling()
{
visualModeSpelling_.checkSpelling(panmirror_.getSpellingDoc());
}
@Override
public void invalidateAllWords()
{
if (panmirror_ != null)
panmirror_.spellingInvalidateAllWords();
}
@Override
public void invalidateWord(String word)
{
if (panmirror_ != null)
panmirror_.spellingInvalidateWord(word);
}
@Override
public void onVisualModeSpellingAddToDictionary(VisualModeSpellingAddToDictionaryEvent event)
{
if (panmirror_ != null)
panmirror_.spellingInvalidateWord(event.getWord());
}
public boolean isVisualModePosition(SourcePosition position)
{
return visualModeNavigation_.isVisualModePosition(position);
}
public void navigate(SourcePosition position, boolean recordCurrentPosition)
{
visualModeNavigation_.navigate(position, recordCurrentPosition);
}
public void navigateToXRef(String xref, boolean recordCurrentPosition)
{
visualModeNavigation_.navigateToXRef(xref, recordCurrentPosition);
}
public void recordCurrentNavigationPosition()
{
visualModeNavigation_.recordCurrentNavigationPosition();
}
public SourcePosition getSourcePosition()
{
return visualModeNavigation_.getSourcePosition();
}
public boolean isAtRow(SourcePosition position)
{
if (visualModeNavigation_.isVisualModePosition(position))
{
return position.getRow() == getSourcePosition().getRow();
}
else
{
return false;
}
}
@Override
public String getYamlFrontMatter()
{
return panmirror_.getYamlFrontMatter();
}
@Override
public boolean applyYamlFrontMatter(String yaml)
{
panmirror_.applyYamlFrontMatter(yaml);
return true;
}
public void activateDevTools()
{
withPanmirror(() -> {
panmirror_.activateDevTools();
});
}
@Override
public void onSourceDocAdded(SourceDocAddedEvent e)
{
if (e.getDoc().getId() != docUpdateSentinel_.getId())
return;
// when interactively adding a visual mode doc, make sure we set the focus
// (special handling required b/c initialization of visual mode docs is
// async so can miss the normal setting of focus)
if (e.getMode() == Source.OPEN_INTERACTIVE && isActivated() && target_.isActiveDocument())
{
if (panmirror_ != null)
{
panmirror_.focus();
}
else if (isLoading_)
{
onReadyHandlers_.add(() -> panmirror_.focus());
}
}
}
public void onClosing()
{
if (syncOnIdle_ != null)
syncOnIdle_.suspend();
if (saveLocationOnIdle_ != null)
saveLocationOnIdle_.suspend();
}
public VisualModeChunk getChunkAtRow(int row)
{
return visualModeChunks_.getChunkAtRow(row);
}
public JsArray<ChunkDefinition> getChunkDefs()
{
return visualModeChunks_.getChunkDefs();
}
public ChunkDefinition getChunkDefAtRow(int row)
{
VisualModeChunk chunk = getChunkAtRow(row);
if (chunk == null)
return null;
return chunk.getDefinition();
}
@Override
public List<CommandPaletteItem> getCommandPaletteItems()
{
return panmirror_.getCommandPaletteItems();
}
public void focus(Command onComplete)
{
activate(() ->
{
panmirror_.focus();
if (onComplete != null)
{
onComplete.execute();
}
});
}
public void setChunkLineExecState(int start, int end, int state)
{
visualModeChunks_.setChunkLineExecState(start, end, state);
}
public void setChunkState(Scope chunk, int state)
{
visualModeChunks_.setChunkState(chunk, state);
}
public void onUserSwitchingToVisualMode()
{
visualModeConfirm_.onUserSwitchToVisualModePending();
}
public String getSelectedText()
{
return panmirror_.getSelectedText();
}
public void replaceSelection(String value)
{
panmirror_.replaceSelection(value);
}
private void manageUI(boolean activate, boolean focus)
{
manageUI(activate, focus, () -> {});
}
private void manageUI(boolean activate, boolean focus, ScheduledCommand completed)
{
// validate the activation
if (activate)
{
String invalid = validateActivation();
if (invalid != null)
{
deactivateWithMessage(invalid);
return;
}
}
// manage commands
manageCommands();
// manage toolbar buttons / menus in display
view_.manageCommandUI();
// get references to the editing container and its source editor
TextEditorContainer editorContainer = view_.editorContainer();
// visual mode enabled (panmirror editor)
if (activate)
{
// set flag indicating that we are loading
isLoading_ = true;
// show progress (as this may well require either loading the
// panmirror library for the first time or a reload of visual mode,
// which is normally instant but for very, very large documents
// can take a couple of seconds)
progress_.beginProgressOperation(400);
editorContainer.activateWidget(progress_);
CommandWithArg<Boolean> done = (success) -> {
// clear progress
progress_.endProgressOperation();
if (success)
{
// sync to editor outline prefs
panmirror_.showOutline(establishOutlineVisible(), getOutlineWidth());
// show find replace button
findReplaceButton_.setVisible(true);
// activate widget
editorContainer.activateWidget(panmirror_, focus);
// begin save-on-idle behavior
syncOnIdle_.resume();
saveLocationOnIdle_.resume();
// (re)inject notebook output from the editor
target_.getNotebook().migrateCodeModeOutput();
// execute completed hook
Scheduler.get().scheduleDeferred(completed);
// clear loading flag and execute any onReady handlers
isLoading_ = false;
onReadyHandlers_.forEach(handler -> { Scheduler.get().scheduleDeferred(handler); });
onReadyHandlers_.clear();
}
else
{
editorContainer.activateEditor(focus);
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
}
};
withPanmirror(() -> {
// if we aren't currently active then set our markdown based
// on what's currently in the source editor
if (!isVisualEditorActive())
{
syncFromEditor(done, focus);
}
else
{
done.execute(true);
}
});
}
// visual mode not enabled (source editor)
else
{
Command activateSourceEditor = () -> {
unmanageCommands();
// hide find replace button
findReplaceButton_.setVisible(false);
editorContainer.activateEditor(focus);
if (syncOnIdle_ != null)
syncOnIdle_.suspend();
if (saveLocationOnIdle_ != null)
saveLocationOnIdle_.suspend();
// move notebook outputs from visual mode
target_.getNotebook().migrateVisualModeOutput();
// execute completed hook
Scheduler.get().scheduleDeferred(completed);
};
// if we are deactivating to allow the user to edit invalid source code then don't sync
// back to the source editor (as this would have happened b/c we inspected the contents
// of the source editor in syncFromEditorIfActivated() and decided we couldn't edit it)
if (deactivatingForInvalidSource_)
{
deactivatingForInvalidSource_ = false;
activateSourceEditor.execute();
}
else
{
syncToEditor(SyncType.SyncTypeActivate, activateSourceEditor);
}
}
}
private void markDirty()
{
dirtyState_.markDirty(true);
source_.setSourceDocumentDirty(
docUpdateSentinel_.getId(), true,
new VoidServerRequestCallback());
}
private TextEditorContainer.Changes toEditorChanges(PanmirrorCode panmirrorCode)
{
// code to diff
String fromCode = getEditorCode();
String toCode = panmirrorCode.code;
// do the diff (timeout after 1 second). note that we only do this
// once the user has stopped typing for 1 second so it's not something
// that will run continuously during editing (in which case a much
// lower timeout would be warranted). note also that timeouts are for
// the diff planning phase so we will still get a valid diff back
// even if the timeout occurs.
PanmirrorUIToolsSource sourceTools = new PanmirrorUITools().source;
TextChange[] changes = sourceTools.diffChars(fromCode, toCode, 1);
// return changes w/ cursor
return new TextEditorContainer.Changes(
changes,
panmirrorCode.selection_only
? new TextEditorContainer.Navigator()
{
@Override
public void onNavigate(DocDisplay docDisplay)
{
visualModeLocation_.setSourceOutlineLocation(panmirrorCode.location);
}
}
: null
);
}
private void syncDevTools()
{
if (panmirror_ != null && panmirror_.devToolsLoaded())
panmirror_.activateDevTools();
}
private void withPanmirror(Command ready)
{
if (panmirror_ == null)
{
// create panmirror (no progress b/c we already have pane progress)
PanmirrorContext context = createPanmirrorContext();
PanmirrorOptions options = panmirrorOptions();
PanmirrorWidget.Options widgetOptions = new PanmirrorWidget.Options();
PanmirrorWidget.create(context, visualModeFormat_.formatSource(),
options, widgetOptions, kCreationProgressDelayMs, (panmirror) -> {
// save reference to panmirror
panmirror_ = panmirror;
// track format comment (used to detect when we need to reload for a new format)
panmirrorFormatConfig_ = new VisualModeReloadChecker(view_);
// remove some keybindings that conflict with the ide
// (currently no known conflicts)
disableKeys();
// periodically sync edits back to main editor
syncOnIdle_ = new DebouncedCommand(1000)
{
@Override
protected void execute()
{
if (isDirty_ && !panmirror_.isInitialDoc())
syncToEditor(SyncType.SyncTypeNormal);
}
};
// periodically save selection
saveLocationOnIdle_ = new DebouncedCommand(1000)
{
@Override
protected void execute()
{
visualModeLocation_.saveEditingLocation(panmirror_.getEditingLocation());
}
};
// set dirty flag + nudge idle sync on change
panmirror_.addPanmirrorUpdatedHandler(new PanmirrorUpdatedEvent.Handler()
{
@Override
public void onPanmirrorUpdated(PanmirrorUpdatedEvent event)
{
// set flag and nudge sync on idle
isDirty_ = true;
syncOnIdle_.nudge();
// update editor dirty state if necessary
if (!loadingFromSource_ && !dirtyState_.getValue())
markDirty();
}
});
// save selection
panmirror_.addPanmirrorStateChangeHandler(new PanmirrorStateChangeEvent.Handler()
{
@Override
public void onPanmirrorStateChange(PanmirrorStateChangeEvent event)
{
saveLocationOnIdle_.nudge();
}
});
// forward navigation event
panmirror_.addPanmirrorNavigationHandler(new PanmirrorNavigationEvent.Handler()
{
@Override
public void onPanmirrorNavigation(PanmirrorNavigationEvent event)
{
visualModeNavigation_.onNavigated(event.getNavigation());
}
});
// check for external edit on focus
panmirror_.addPanmirrorFocusHandler(new PanmirrorFocusEvent.Handler()
{
@Override
public void onPanmirrorFocus(PanmirrorFocusEvent event)
{
target_.checkForExternalEdit(100);
// Disable code-related commands, on the presumption that we
// are in a prose region of the document. These commands will
// be re-enabled shortly if focus is sent to a code chunk, and
// will remain disabled if we aren't.
//
// Note that the PanmirrorFocusEvent is fired when selection
// exits a code chunk as well as when the entire widget loses
// focus.
setCodeCommandsEnabled(false);
// Also clear the last focused Ace editor. This is normally
// used by addins which need to target the 'active' editor,
// with the 'active' state persisting after other UI elements
// (e.g. the Addins toolbar) have been clicked. However, if
// focus has been moved to a new editor context, then we instead
// want to clear that state.
AceEditor.clearLastFocusedEditor();
}
});
// track changes in outline sidebar and save as prefs
panmirror_.addPanmirrorOutlineVisibleHandler((event) -> {
setOutlineVisible(event.getVisible());
});
panmirror_.addPanmirrorOutlineWidthHandler((event) -> {
setOutlineWidth(event.getWidth());
});
// manage latch state of findreplace button
panmirror_.addPanmirrorFindReplaceVisibleHandler((event) -> {
findReplaceButton_.setLeftImage(event.getVisible()
? FindReplaceBar.getFindLatchedIcon()
: FindReplaceBar.getFindIcon());
});
// good to go!
ready.execute();
});
}
else
{
// panmirror already created
ready.execute();
}
}
private PanmirrorContext createPanmirrorContext()
{
PanmirrorUIDisplay.ShowContextMenu showContextMenu = (commands, clientX, clientY) -> {
return panmirror_.showContextMenu(commands, clientX, clientY);
};
return visualModeContext_.createContext(showContextMenu);
}
private String getEditorCode()
{
return VisualModeUtil.getEditorCode(view_);
}
private TextEditorContainer.Editor getSourceEditor()
{
return view_.editorContainer().getEditor();
}
private boolean establishOutlineVisible()
{
return target_.establishPreferredOutlineWidgetVisibility(
prefs_.visualMarkdownEditingShowDocOutline().getValue()
);
}
private boolean getOutlineVisible()
{
return target_.getPreferredOutlineWidgetVisibility(
prefs_.visualMarkdownEditingShowDocOutline().getValue()
);
}
private void setOutlineVisible(boolean visible)
{
target_.setPreferredOutlineWidgetVisibility(visible);
}
private double getOutlineWidth()
{
return target_.getPreferredOutlineWidgetSize();
}
private void setOutlineWidth(double width)
{
target_.setPreferredOutlineWidgetSize(width);
}
private void disableKeys(String... commands)
{
PanmirrorKeybindings keybindings = disabledKeybindings(commands);
panmirror_.setKeybindings(keybindings);
}
private PanmirrorKeybindings disabledKeybindings(String... commands)
{
PanmirrorKeybindings keybindings = new PanmirrorKeybindings();
for (String command : commands)
keybindings.add(command, new String[0]);
return keybindings;
}
private void disableForVisualMode(AppCommand... commands)
{
if (isActivated())
{
for (AppCommand command : commands)
{
if (command.isVisible() && command.isEnabled())
{
command.setEnabled(false);
if (!disabledForVisualMode_.contains(command))
disabledForVisualMode_.add(command);
}
}
}
}
private void restoreDisabledForVisualMode()
{
disabledForVisualMode_.forEach((command) -> {
command.setEnabled(true);
});
disabledForVisualMode_.clear();
}
private HandlerRegistration onDocPropChanged(String prop, ValueChangeHandler<String> handler)
{
return docUpdateSentinel_.addPropertyValueChangeHandler(prop, handler);
}
private VisualModeNavigation.Context navigationContext_ = new VisualModeNavigation.Context() {
@Override
public String getId()
{
return docUpdateSentinel_.getId();
}
@Override
public String getPath()
{
return docUpdateSentinel_.getPath();
}
@Override
public PanmirrorWidget panmirror()
{
return panmirror_;
}
};
private PanmirrorOptions panmirrorOptions()
{
// create options
PanmirrorOptions options = new PanmirrorOptions();
// use embedded codemirror for code blocks
options.codeEditor = prefs_.visualMarkdownCodeEditor().getValue();
// highlight rmd example chunks
options.rmdExampleHighlight = true;
// add focus-visible class to prevent interaction with focus-visible.js
// (it ends up attempting to apply the "focus-visible" class b/c ProseMirror
// is contentEditable, and that triggers a dom mutation event for ProseMirror,
// which in turn causes us to lose table selections)
options.className = "focus-visible";
return options;
}
private String validateActivation()
{
if (this.docDisplay_.hasActiveCollabSession())
{
return "You cannot enter visual mode while using realtime collaboration.";
}
else if (BrowseCap.isInternetExplorer())
{
return "Visual mode is not supported in Internet Explorer.";
}
else
{
return visualModeFormat_.validateSourceForVisualMode();
}
}
private void deactivateForInvalidSource(String invalid)
{
deactivatingForInvalidSource_ = true;
deactivateWithMessage(invalid);
}
private void deactivateWithMessage(String message)
{
docUpdateSentinel_.setBoolProperty(TextEditingTarget.RMD_VISUAL_MODE, false);
view_.showWarningBar(message);
}
/**
* Align the document's scope tree with the code chunks in visual mode.
*
* @param location Array of outline locations from visual mode
*/
private void alignScopeOutline(PanmirrorEditingOutlineLocation location)
{
// Get all of the chunks from the document (code view)
ArrayList<Scope> chunkScopes = new ArrayList<Scope>();
ScopeList chunks = new ScopeList(docDisplay_);
chunks.selectAll(ScopeList.CHUNK);
for (Scope chunk : chunks)
{
chunkScopes.add(chunk);
}
// Get all of the chunks from the outline emitted by visual mode
ArrayList<PanmirrorEditingOutlineLocationItem> chunkItems =
new ArrayList<PanmirrorEditingOutlineLocationItem>();
for (int j = 0; j < location.items.length; j++)
{
if (location.items[j].type == PanmirrorOutlineItemType.RmdChunk)
{
chunkItems.add(location.items[j]);
}
}
// Refuse to proceed if cardinality doesn't match (consider: does this
// need to account for deeply nested chunks that might appear in one
// outline but not the other?)
if (chunkScopes.size() != chunkItems.size())
{
Debug.logWarning(chunkScopes.size() + " chunks in scope tree, but " +
chunkItems.size() + " chunks in visual editor.");
return;
}
for (int k = 0; k < chunkItems.size(); k++)
{
PanmirrorEditingOutlineLocationItem visualItem =
Js.uncheckedCast(chunkItems.get(k));
VisualModeChunk chunk = visualModeChunks_.getChunkAtVisualPosition(
visualItem.position);
if (chunk == null)
{
// This is normal; it is possible that we haven't created a chunk
// editor at this position yet.
continue;
}
chunk.setScope(chunkScopes.get(k));
}
}
private Commands commands_;
private UserPrefs prefs_;
private SourceServerOperations source_;
private DocDisplay activeEditor_; // the current embedded editor
private final TextEditingTarget target_;
private final TextEditingTarget.Display view_;
private final DocDisplay docDisplay_; // the parent editor
private final DirtyState dirtyState_;
private final DocUpdateSentinel docUpdateSentinel_;
private final VisualModePanmirrorFormat visualModeFormat_;
private final VisualModeChunks visualModeChunks_;
private final VisualModePanmirrorContext visualModeContext_;
private final VisualModeEditingLocation visualModeLocation_;
private final VisualModeMarkdownWriter visualModeWriterOptions_;
private final VisualModeNavigation visualModeNavigation_;
private final VisualModeConfirm visualModeConfirm_;
private final VisualModeSpelling visualModeSpelling_;
private VisualModeReloadChecker panmirrorFormatConfig_;
private DebouncedCommand syncOnIdle_;
private DebouncedCommand saveLocationOnIdle_;
private boolean isDirty_ = false;
private boolean loadingFromSource_ = false;
private boolean deactivatingForInvalidSource_ = false;
private PanmirrorWidget panmirror_;
private ToolbarButton findReplaceButton_;
private ArrayList<AppCommand> disabledForVisualMode_ = new ArrayList<AppCommand>();
private final ProgressPanel progress_;
private SerializedCommandQueue syncToEditorQueue_ = new SerializedCommandQueue();
private boolean isLoading_ = false;
private List<ScheduledCommand> onReadyHandlers_ = new ArrayList<ScheduledCommand>();
private static final int kCreationProgressDelayMs = 0;
private static final int kSerializationProgressDelayMs = 5000;
// priority task queue for expensive calls to panmirror_.setMarkdown
// (currently active tab bumps itself up in priority)
private static PreemptiveTaskQueue setMarkdownQueue_ = new PreemptiveTaskQueue(true, false);
}
| only disable code commands in visual mode; fixes #7934
| src/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/visualmode/VisualMode.java | only disable code commands in visual mode; fixes #7934 | <ide><path>rc/gwt/src/org/rstudio/studio/client/workbench/views/source/editors/text/visualmode/VisualMode.java
<ide>
<ide> // initially disable code commands (they will be re-enabled later when an
<ide> // editor has focus)
<del> setCodeCommandsEnabled(false);
<add> if (isActivated())
<add> {
<add> setCodeCommandsEnabled(false);
<add> }
<ide> }
<ide>
<ide> |
|
Java | apache-2.0 | e2bf6fcd9934c0ff235bb0c617cb870728504d2d | 0 | jbossas/jboss-vfs,psakar/jboss-vfs | /*
* JBoss, Home of Professional Open Source
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.test.vfs;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.jboss.test.vfs.support.ClassPathIterator;
import org.jboss.test.vfs.support.ClassPathIterator.ClassPathEntry;
import org.jboss.vfs.TempFileProvider;
import org.jboss.vfs.VFS;
import org.jboss.vfs.VFSUtils;
import org.jboss.vfs.VirtualFile;
import org.jboss.vfs.VisitorAttributes;
import org.jboss.vfs.util.SuffixMatchFilter;
/**
* Tests of the VFS implementation
*
* @author [email protected]
* @author [email protected]
* @version $Revision$
*/
public class FileVFSUnitTestCase extends AbstractVFSTest
{
private TempFileProvider provider;
public FileVFSUnitTestCase(String name)
{
super(name);
}
public void setUp() throws Exception
{
super.setUp();
provider = TempFileProvider.create("test", new ScheduledThreadPoolExecutor(2));
}
public void tearDown() throws Exception
{
provider.close();
}
public static Test suite()
{
return new TestSuite(FileVFSUnitTestCase.class);
}
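   /**
    * Recursively mount the given file and any children that are archives
    * (.jar, .war, .ear), returning the mount handles so callers can close them.
    */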
public List<Closeable> recursiveMount(VirtualFile file) throws IOException
{
ArrayList<Closeable> mounts = new ArrayList<Closeable>();
if (!file.isDirectory() && file.getName().matches("^.*\\.[EeWwJj][Aa][Rr]$"))
mounts.add(VFS.mountZip(file, file, provider));
if (file.isDirectory())
for (VirtualFile child : file.getChildren())
mounts.addAll(recursiveMount(child));
return mounts;
}
/**
    * Test that one can go from a file URI to a VirtualFile and obtain the
    * same VirtualFile using the VirtualFile's vfsfile URI
* @throws Exception
*/
public void testVFSFileURIFactory() throws Exception
{
URL rootURL = getClass().getProtectionDomain().getCodeSource().getLocation();
VFS rootVFS = VFS.getInstance();
VirtualFile root0 = rootVFS.getChild(rootURL.getPath());
VirtualFile root1 = rootVFS.getChild(root0.toURI().getPath());
assertEquals(root0, root1);
}
/**
* Test reading the contents of nested jar entries.
* @throws Exception
*/
public void testInnerJarFile() throws Exception
{
URL rootURL = getResource("/vfs/test");
VFS vfs = VFS.getInstance();
VirtualFile testdir = vfs.getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertTrue("outer.jar != null", outerjar != null);
VirtualFile jar1 = outerjar.getChild("jar1.jar");
assertTrue("outer.jar/jar1.jar != null", jar1 != null);
VirtualFile jar2 = outerjar.getChild("jar2.jar");
assertTrue("outer.jar/jar2.jar != null", jar2 != null);
VirtualFile jar1MF = jar1.getChild("META-INF/MANIFEST.MF");
assertNotNull("jar1!/META-INF/MANIFEST.MF", jar1MF);
InputStream mfIS = jar1MF.openStream();
Manifest mf1 = new Manifest(mfIS);
Attributes mainAttrs1 = mf1.getMainAttributes();
String title1 = mainAttrs1.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1", title1);
mfIS.close();
VirtualFile jar2MF = jar2.getChild("META-INF/MANIFEST.MF");
assertNotNull("jar2!/META-INF/MANIFEST.MF", jar2MF);
InputStream mfIS2 = jar2MF.openStream();
Manifest mf2 = new Manifest(mfIS2);
Attributes mainAttrs2 = mf2.getMainAttributes();
String title2 = mainAttrs2.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar2", title2);
mfIS2.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar
* @throws Exception
*/
public void testFindResource() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("outer.jar != null", jar != null);
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar
* @throws Exception
*/
public void testFindResourceUsingURLStream() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("outer.jar != null", jar != null);
/*
ArrayList<String> searchCtx = new ArrayList<String>();
searchCtx.add("outer.jar");
VirtualFile metaInf = vfs.resolveFile("META-INF/MANIFEST.MF", searchCtx);
*/
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
System.err.println(metaInf.toURL());
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
String urlString = metaInf.toURL().toString();
URL mfURL = new URL(urlString);
mfIS = mfURL.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
mf = new Manifest(mfIS);
mainAttrs = mf.getMainAttributes();
version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar that does not
* have parent directory entries.
* @throws Exception
*/
public void testFindResourceInFilesOnlyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("jar1-filesonly.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("jar1-filesonly.jar != null", jar != null);
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1-filesonly", title);
mfIS.close();
String urlString = metaInf.toURL().toString();
URL mfURL = new URL(urlString);
mfIS = mfURL.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
mf = new Manifest(mfIS);
mainAttrs = mf.getMainAttributes();
version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1-filesonly", title);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a war that does not
* have parent directory entries.
* @throws Exception
*/
public void testFindResourceInFilesOnlyWar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile war2 = testdir.getChild("WarDeployApp_web.war");
List<Closeable> mounts = recursiveMount(war2);
try
{
assertTrue("WarDeployApp_web.war != null", war2 != null);
VirtualFile classes2 = war2.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes2 != null);
assertTrue("WEB-INF/classes is not a leaf", classes2.isLeaf() == false);
classes2 = war2.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes2 != null);
assertTrue("WEB-INF/classes is not a leaf", classes2.isLeaf() == false);
VirtualFile HelloJavaBean = classes2.getChild("com/sun/ts/tests/webservices/deploy/warDeploy/HelloJavaBean.class");
assertTrue("HelloJavaBean.class != null", HelloJavaBean != null);
assertTrue("HelloJavaBean.class is a leaf", HelloJavaBean.isLeaf());
VirtualFile war = testdir.getChild("filesonly.war");
mounts.addAll(recursiveMount(war));
assertTrue("filesonly.war != null", war != null);
VirtualFile classes = war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
assertTrue("WEB-INF/classes is not a directory", classes.isDirectory());
VirtualFile jar1 = war.getChild("WEB-INF/lib/jar1.jar");
assertTrue("WEB-INF/lib/jar1.jar != null", jar1 != null);
assertTrue("WEB-INF/lib/jar1.jar is not a leaf", jar1.isLeaf() == false);
VirtualFile ClassInJar1 = jar1.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1.class");
assertTrue("ClassInJar1.class != null", ClassInJar1 != null);
assertTrue("ClassInJar1.class is a leaf", ClassInJar1.isLeaf());
VirtualFile metaInf = war.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("filesonly-war", title);
mfIS.close();
         classes = war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
assertTrue("WEB-INF/classes is not a leaf", classes.isLeaf() == false);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate iterating over a vfs url from a files only war.
*
* @throws Exception
*/
public void testFindClassesInFilesOnlyWar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile war = testdir.getChild("filesonly.war");
List<Closeable> mounts = recursiveMount(war);
try
{
assertTrue("filesonly.war != null", war != null);
VirtualFile classes = war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
HashSet<String> names = new HashSet<String>();
ClassPathIterator iter = new ClassPathIterator(classes.toURL());
ClassPathEntry entry = null;
while ((entry = iter.getNextEntry()) != null)
{
names.add(entry.name);
}
log.debug(names);
assertTrue("org/jboss/test/vfs/support/jar1", names.contains("org/jboss/test/vfs/support/jar1"));
assertTrue("ClassInJar1.class", names.contains("org/jboss/test/vfs/support/jar1/ClassInJar1.class"));
assertTrue("ClassInJar1$InnerClass.class", names.contains("org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
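   /**
    * Basic tests of accessing resources in an unpacked jar
    * @throws Exception
    */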
public void testFindResourceUnpackedJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("unpacked-outer.jar");
assertTrue("unpacked-outer.jar != null", jar != null);
/**
ArrayList<String> searchCtx = new ArrayList<String>();
searchCtx.add("unpacked-outer.jar");
VirtualFile metaInf = vfs.resolveFile("META-INF/MANIFEST.MF", searchCtx);
*/
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
/**
* Test simple file resolution without search contexts
* @throws Exception
*/
public void testResolveFile() throws Exception
{
log.info("+++ testResolveFile, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Check resolving the root file
VirtualFile root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerJar);
try
{
assertNotNull("outer.jar", outerJar);
assertEquals("outer.jar name", "outer.jar", outerJar.getName());
assertEquals("outer.jar path", rootURL.getPath() + "/outer.jar", outerJar.getPathName());
VirtualFile outerJarMF = testdir.getChild("outer.jar/META-INF/MANIFEST.MF");
assertNotNull("outer.jar/META-INF/MANIFEST.MF", outerJarMF);
// Test a non-canonical path
rootURL = getResource("/vfs/sundry/../test");
// Check resolving the root file
root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate resolving a .class file given a set of search contexts in the
* vfs that make up a classpath.
*
* @throws Exception
*/
public void testResolveClassFileInClassPath() throws Exception
{
log.info("+++ testResolveFile, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Find ClassInJar1.class
VirtualFile vf = testdir.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(vf);
try
{
VirtualFile c1 = vf.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1.class");
assertNotNull("ClassInJar1.class VF", c1);
log.debug("Found ClassInJar1.class: " + c1);
// Find ClassInJar1$InnerClass.class
VirtualFile c1i = vf.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
assertNotNull("ClassInJar1$InnerClass.class VF", c1i);
log.debug("Found ClassInJar1$InnerClass.class: " + c1i);
// Find ClassInJar2.class
vf = testdir.getChild("jar2.jar");
mounts.addAll(recursiveMount(vf));
VirtualFile c2 = vf.getChild("org/jboss/test/vfs/support/jar2/ClassInJar2.class");
assertNotNull("ClassInJar2.class VF", c2);
log.debug("Found ClassInJar2.class: " + c2);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
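   /**
    * Test simple file resolution against an unpacked jar
    * @throws Exception
    */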
public void testResolveFileInUnpackedJar() throws Exception
{
log.info("+++ testResolveFileInUnpackedJar, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Check resolving the root file
VirtualFile root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
assertNotNull("unpacked-outer.jar", outerJar);
assertEquals("unpacked-outer.jar name", "unpacked-outer.jar", outerJar.getName());
assertEquals("unpacked-outer.jar path", rootURL.getPath() + "/unpacked-outer.jar", outerJar.getPathName());
VirtualFile outerJarMF = testdir.getChild("unpacked-outer.jar/META-INF/MANIFEST.MF");
assertNotNull("unpacked-outer.jar/META-INF/MANIFEST.MF", outerJarMF);
// Check resolving the root file
root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
}
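   /**
    * Test that a missing child of an unpacked jar reports exists() as false
    * @throws Exception
    */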
public void testFileNotFoundInUnpackedJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
assertNotNull("unpacked-outer.jar", outerJar);
assertFalse(outerJar.getChild("WEB-INF").exists());
}
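   /**
    * Placeholder for a test of resolving the parent of a doubly-nested entry; still a TODO
    * @throws Exception
    */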
public void testNestedNestedParent() throws Exception
{
// TODO
}
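   /**
    * Placeholder for a test of copying the stream of a nested entry; still a TODO
    * @throws Exception
    */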
public void testCopyNestedStream() throws Exception
{
// TODO
}
/**
* Test file resolution with nested jars
* @throws Exception
*/
public void testInnerJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = testdir.getChild("outer.jar/jar1.jar");
log.info("IsFile: " + inner.isLeaf());
log.info(inner.getLastModified());
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
assertTrue("jar1.jar children.length(" + contents.size() + ") >= 2", contents.size() >= 2);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
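   /**
    * Test file resolution with nested jars, reading the manifest via its URL stream
    * @throws Exception
    */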
public void testInnerJarUsingURLStream() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = testdir.getChild("outer.jar/jar1.jar");
log.info("IsFile: " + inner.isLeaf());
log.info(inner.getLastModified());
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
assertTrue("jar1.jar children.length(" + contents.size() + ") >= 2", contents.size() >= 2);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.toURL().openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the outer.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScan() throws Exception
{
URL rootURL = getResource("/vfs/test/outer.jar");
VirtualFile outer = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(outer);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(outer.getPathName() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(outer.getPathName() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(outer.getPathName() + "/jar1-filesonly.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(outer.getPathName() + "/jar1-filesonly.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(outer.getPathName() + "/jar2.jar/org/jboss/test/vfs/support/jar2/ClassInJar2.class");
expectedClasses.add(outer.getPathName() + "/org/jboss/test/vfs/support/CommonClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
List<VirtualFile> classes = outer.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 6 classes", 6, count);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the unpacked-outer.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScanUnpacked() throws Exception
{
URL rootURL = getResource("/vfs/test/unpacked-outer.jar");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(testdir);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(rootURL.getPath() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(rootURL.getPath() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(rootURL.getPath() + "/jar2.jar/org/jboss/test/vfs/support/jar2/ClassInJar2.class");
// FIXME: .class files are not being copied from the resources directory
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/CommonClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
List<VirtualFile> classes = testdir.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 4 classes", 4, count);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the jar1-filesonly.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScanFilesonly() throws Exception
{
URL rootURL = getResource("/vfs/test/jar1-filesonly.jar");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(testdir);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
List<VirtualFile> classes = testdir.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 2 classes", 2, count);
// Make sure we can walk path-wise to the class
VirtualFile parent = testdir;
String className = "org/jboss/test/vfs/support/jar1/ClassInJar1.class";
VirtualFile classInJar1 = testdir.getChild(className);
String[] paths = className.split("/");
StringBuilder vfsPath = new StringBuilder();
for (String path : paths)
{
vfsPath.append(path);
VirtualFile vf = parent.getChild(path);
if (path.equals("ClassInJar1.class"))
assertEquals("ClassInJar1.class", classInJar1, vf);
else
{
assertEquals("vfsPath", testdir.getPathName() + "/" + vfsPath.toString(), vf.getPathName());
// why should this be equal?
// assertEquals("lastModified", classInJar1.getLastModified(), vf.getLastModified());
assertTrue("lastModified", classInJar1.getLastModified() <= vf.getLastModified());
}
vfsPath.append('/');
parent = vf;
}
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test access of directories in a jar that only stores files
* @throws Exception
*/
public void testFilesOnlyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("jar1-filesonly.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
VirtualFile metadataLocation = jar.getChild("META-INF");
assertNotNull(metadataLocation);
VirtualFile mfFile = metadataLocation.getChild("MANIFEST.MF");
assertNotNull(mfFile);
InputStream is = mfFile.openStream();
Manifest mf = new Manifest(is);
is.close();
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1-filesonly", title);
// Retry starting from the jar root
mfFile = jar.getChild("META-INF/MANIFEST.MF");
is = mfFile.openStream();
mf = new Manifest(is);
is.close();
title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1-filesonly", title);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test the serialization of VirtualFiles
* @throws Exception
*/
public void testVFSerialization() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
tmpRoot.deleteOnExit();
File tmp = new File(tmpRoot, "vfs.ser");
tmp.createNewFile();
tmp.deleteOnExit();
log.info("+++ testVFSerialization, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild("vfs.ser");
FileOutputStream fos = new FileOutputStream(tmp);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(tmpVF);
oos.close();
// Check the tmpVF attributes against the tmp file
long lastModified = tmp.lastModified();
long size = tmp.length();
String name = tmp.getName();
String vfsPath = tmp.getPath();
URL url = tmp.toURI().toURL();
log.debug("name: " + name);
log.debug("vfsPath: " + vfsPath);
log.debug("url: " + url);
log.debug("lastModified: " + lastModified);
log.debug("size: " + size);
assertEquals("name", name, tmpVF.getName());
assertEquals("pathName", vfsPath, tmpVF.getPathName());
assertEquals("lastModified", lastModified, tmpVF.getLastModified());
assertEquals("size", size, tmpVF.getSize());
assertEquals("url", url, tmpVF.toURL());
assertEquals("isLeaf", true, tmpVF.isLeaf());
//assertEquals("isHidden", false, tmpVF.isHidden());
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(tmp);
ObjectInputStream ois = new ObjectInputStream(fis);
VirtualFile tmpVF2 = (VirtualFile)ois.readObject();
ois.close();
      // Validate the deserialized attributes against the tmp file
assertEquals("name", name, tmpVF2.getName());
assertEquals("pathName", vfsPath, tmpVF2.getPathName());
assertEquals("lastModified", lastModified, tmpVF2.getLastModified());
assertEquals("size", size, tmpVF2.getSize());
assertEquals("url", url, tmpVF2.toURL());
assertEquals("isLeaf", true, tmpVF2.isLeaf());
//assertEquals("isHidden", false, tmpVF2.isHidden());
}
/**
* Test the serialization of VirtualFiles representing a jar
* @throws Exception
*/
public void testVFJarSerialization() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
tmpRoot.deleteOnExit();
// Create a test jar containing a txt file
File tmpJar = new File(tmpRoot, "tst.jar");
tmpJar.createNewFile();
tmpJar.deleteOnExit();
FileOutputStream fos = new FileOutputStream(tmpJar);
JarOutputStream jos = new JarOutputStream(fos);
// Write a text file to include in a test jar
JarEntry txtEntry = new JarEntry("tst.txt");
jos.putNextEntry(txtEntry);
txtEntry.setSize("testVFJarSerialization".length());
txtEntry.setTime(System.currentTimeMillis());
jos.write("testVFJarSerialization".getBytes());
jos.close();
log.info("+++ testVFJarSerialization, tmp=" + tmpJar.getCanonicalPath());
URI rootURI = tmpRoot.toURI();
VirtualFile tmp = VFS.getInstance().getChild(rootURI.getPath());
File vfsSer = new File(tmpRoot, "vfs.ser");
vfsSer.createNewFile();
vfsSer.deleteOnExit();
VirtualFile tmpVF = tmp.getChild("tst.jar");
// Validate the vf jar against the tmp file attributes
long lastModified = tmpJar.lastModified();
long size = tmpJar.length();
String name = tmpJar.getName();
String vfsPath = tmpJar.getPath();
URL url = tmpJar.toURL();
//url = JarUtils.createJarURL(url);
log.debug("name: " + name);
log.debug("vfsPath: " + vfsPath);
log.debug("url: " + url);
log.debug("lastModified: " + lastModified);
log.debug("size: " + size);
assertEquals("name", name, tmpVF.getName());
assertEquals("pathName", vfsPath, tmpVF.getPathName());
assertEquals("lastModified", lastModified, tmpVF.getLastModified());
assertEquals("size", size, tmpVF.getSize());
assertEquals("url", url.getPath(), tmpVF.toURL().getPath());
// TODO: these should pass
assertEquals("isDirectory", false, tmpVF.isDirectory());
//assertEquals("isHidden", false, tmpVF.isHidden());
// Write out the vfs jar file
fos = new FileOutputStream(vfsSer);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(tmpVF);
oos.close();
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(vfsSer);
ObjectInputStream ois = new ObjectInputStream(fis);
VirtualFile tmpVF2 = (VirtualFile)ois.readObject();
ois.close();
// Validate the vf jar against the tmp file attributes
assertEquals("name", name, tmpVF2.getName());
assertEquals("pathName", vfsPath, tmpVF2.getPathName());
assertEquals("lastModified", lastModified, tmpVF2.getLastModified());
assertEquals("size", size, tmpVF2.getSize());
assertEquals("url", url.getPath(), tmpVF2.toURL().getPath());
// TODO: these should pass
assertEquals("isDirectory", false, tmpVF2.isDirectory());
//assertEquals("isHidden", false, tmpVF2.isHidden());
}
/**
* Test the serialization of VirtualFiles representing a jar
* @throws Exception
*/
public void testVFNestedJarSerialization() throws Exception
{
// this expects to be run with a working dir of the container root
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = outer.getChild("jar1.jar");
File vfsSer = File.createTempFile("testVFNestedJarSerialization", ".ser");
vfsSer.deleteOnExit();
// Write out the vfs inner jar file
FileOutputStream fos = new FileOutputStream(vfsSer);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(inner);
oos.close();
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(vfsSer);
ObjectInputStream ois = new ObjectInputStream(fis);
inner = (VirtualFile)ois.readObject();
ois.close();
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
// TODO - fix this once no_copy serialization is working
int size = 2;
assertTrue("jar1.jar children.length(" + contents.size() + ") is not " + size, contents.size() >= size);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
/*
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
*/
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the URL of a VFS corresponding to a directory ends in '/' so that
* URLs created relative to it are under the directory. This requires that
* build-test.xml artifacts exist.
*
* @throws Exception
*/
public void testDirURLs() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
URL outerURL = outerJar.toURL();
log.debug("outerURL: " + outerURL);
assertTrue(outerURL + " ends in '/'", outerURL.getPath().endsWith("/"));
// Validate that jar1 is under unpacked-outer.jar
URL jar1URL = new URL(outerURL, "jar1.jar/");
log.debug("jar1URL: " + jar1URL + ", path=" + jar1URL.getPath());
      assertTrue("jar1URL path ends in unpacked-outer.jar/jar1.jar/", jar1URL.getPath().endsWith("unpacked-outer.jar/jar1.jar/"));
VirtualFile jar1 = outerJar.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(jar1);
try
{
assertEquals(jar1URL.getPath(), jar1.toURL().getPath());
VirtualFile packedJar = testdir.getChild("jar1.jar");
mounts.addAll(recursiveMount(packedJar));
jar1URL = packedJar.getChild("org/jboss/test/vfs/support").toURL();
assertTrue("Jar directory entry URLs must end in /: " + jar1URL.toString(), jar1URL.toString().endsWith("/"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the URI of a VFS corresponding to a directory ends in '/' so that
* URIs created relative to it are under the directory. This requires that
* build-test.xml artifacts exist.
*
* @throws Exception
*/
public void testDirURIs() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
URI outerURI = outerJar.toURI();
log.debug("outerURI: " + outerURI);
assertTrue(outerURI + " ends in '/'", outerURI.getPath().endsWith("/"));
// Validate that jar1 is under unpacked-outer.jar
URI jar1URI = new URI(outerURI + "jar1.jar/");
log.debug("jar1URI: " + jar1URI + ", path=" + jar1URI.getPath());
      assertTrue("jar1URI path ends in unpacked-outer.jar/jar1.jar/", jar1URI.getPath().endsWith("unpacked-outer.jar/jar1.jar/"));
VirtualFile jar1 = outerJar.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(jar1);
try
{
assertEquals(jar1URI.getPath(), jar1.toURI().getPath());
VirtualFile packedJar = testdir.getChild("jar1.jar");
mounts.addAll(recursiveMount(packedJar));
jar1URI = packedJar.getChild("org/jboss/test/vfs/support").toURI();
assertTrue("Jar directory entry URLs must end in /: " + jar1URI.toString(), jar1URI.toString().endsWith("/"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test copying a jar
*
* @throws Exception
*/
public void testCopyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
assertTrue("outer.jar != null", jar != null);
File tmpJar = File.createTempFile("testCopyJar", ".jar");
tmpJar.deleteOnExit();
try
{
InputStream is = jar.openStream();
FileOutputStream fos = new FileOutputStream(tmpJar);
byte[] buffer = new byte[1024];
int read;
while ((read = is.read(buffer)) > 0)
{
fos.write(buffer, 0, read);
}
fos.close();
log.debug("outer.jar size is: " + jar.getSize());
log.debug(tmpJar.getAbsolutePath() + " size is: " + tmpJar.length());
assertTrue("outer.jar > 0", jar.getSize() > 0);
assertEquals("copy jar size", jar.getSize(), tmpJar.length());
is.close();
}
finally
{
try
{
tmpJar.delete();
}
catch (Exception ignore)
{
}
}
}
/**
* Test copying a jar that is nested in another jar.
*
* @throws Exception
*/
public void testCopyInnerJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertTrue("outer.jar != null", outerjar != null);
VirtualFile jar = outerjar.getChild("jar1.jar");
assertTrue("outer.jar/jar1.jar != null", jar != null);
File tmpJar = File.createTempFile("testCopyInnerJar", ".jar");
tmpJar.deleteOnExit();
try
{
InputStream is = jar.openStream();
FileOutputStream fos = new FileOutputStream(tmpJar);
byte[] buffer = new byte[1024];
int read;
while ((read = is.read(buffer)) > 0)
{
fos.write(buffer, 0, read);
}
fos.close();
log.debug("outer.jar/jar1.jar size is: " + jar.getSize());
log.debug(tmpJar.getAbsolutePath() + " size is: " + tmpJar.length());
assertTrue("outer.jar > 0", jar.getSize() > 0);
assertEquals("copy jar size", jar.getSize(), tmpJar.length());
is.close();
}
finally
{
try
{
tmpJar.delete();
}
catch (Exception ignore)
{
}
}
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the outermf.jar manifest classpath is parsed
* correctly.
*
* @throws Exception
*/
public void testManifestClasspath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outermf.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertNotNull("outermf.jar != null", outerjar);
ArrayList<VirtualFile> cp = new ArrayList<VirtualFile>();
VFSUtils.addManifestLocations(outerjar, cp);
// The p0.jar should be found in the classpath
assertEquals("cp size 2", 2, cp.size());
assertEquals("jar1.jar == cp[0]", "jar1.jar", cp.get(0).getName());
assertEquals("jar2.jar == cp[1]", "jar2.jar", cp.get(1).getName());
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
    * Test that an inner-inner jar that is extracted does not blow up
* the addManifestLocations routine.
*
* @throws Exception
*/
public void testInnerManifestClasspath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("withalong/rootprefix/outermf.jar");
assertNotNull(outerjar);
List<Closeable> mounts = recursiveMount(outerjar);
try
{
VirtualFile jar1 = outerjar.getChild("jar1.jar");
assertNotNull(jar1);
VirtualFile jar2 = outerjar.getChild("jar2.jar");
assertNotNull(jar2);
VirtualFile innerjar = outerjar.getChild("innermf.jar");
assertNotNull("innermf.jar != null", innerjar);
ArrayList<VirtualFile> cp = new ArrayList<VirtualFile>();
VFSUtils.addManifestLocations(innerjar, cp);
assertEquals(2, cp.size());
VirtualFile cp0 = cp.get(0);
assertEquals(jar1, cp0);
VirtualFile cp1 = cp.get(1);
assertEquals(jar2, cp1);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
    * Validate accessing a packed jar vf and its uri when the vfs path
* contains spaces
* @throws Exception
*/
public void testJarWithSpacesInPath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tstjar = testdir.getChild("path with spaces/tst.jar");
List<Closeable> mounts = recursiveMount(tstjar);
try
{
assertNotNull("tstjar != null", tstjar);
URI uri = tstjar.toURI();
URI expectedURI = new URI("vfs" + rootURL.toString() + "/path%20with%20spaces/tst.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
InputStream is = uri.toURL().openStream();
is.close();
tstjar = testdir.getChild("path with spaces/tst%20nospace.jar");
mounts.addAll(recursiveMount(tstjar));
assertNotNull("tstjar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/path%20with%20spaces/tst%2520nospace.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
is = uri.toURL().openStream();
is.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
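   /**
    * Validate accessing packed jars nested in an ear when the vfs context path
    * contains spaces
    * @throws Exception
    */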
public void testJarWithSpacesInContext() throws Exception
{
URL rootURL = getResource("/vfs/test/path with spaces");
VirtualFile testdir = VFS.getInstance().getChild(URLDecoder.decode(rootURL.getPath(), "UTF-8"));
VirtualFile tstear = testdir.getChild("spaces.ear");
List<Closeable> mounts = recursiveMount(tstear);
try
{
assertNotNull("spaces.ear != null", tstear);
assertTrue(tstear.isDirectory());
URI uri = tstear.toURI();
URI expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/");
assertEquals(expectedURI.getPath(), uri.getPath());
InputStream is = uri.toURL().openStream();
is.close();
VirtualFile tstjar = tstear.getChild("spaces-ejb.jar");
assertNotNull("spaces-ejb.jar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/spaces-ejb.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
assertFalse(tstjar.isLeaf());
is = uri.toURL().openStream();
is.close();
tstjar = tstear.getChild("spaces-lib.jar");
assertNotNull("spaces-lib.jar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/spaces-lib.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
assertFalse(tstjar.isLeaf());
is = uri.toURL().openStream();
is.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate accessing an unpacked jar vf and its uri when the vfs path
* contains spaces
* @throws Exception
*/
public void testUnpackedJarWithSpacesInPath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tstjar = testdir.getChild("path with spaces/unpacked-tst.jar");
assertNotNull("tstjar != null", tstjar);
URI uri = tstjar.toURI();
URI expectedURI = new URI(rootURL.toString() + "/path%20with%20spaces/unpacked-tst.jar/");
      assertEquals(expectedURI, uri);
}
// /**
// * Tests that we can find the META-INF/some-data.xml in an unpacked deployment
// *
// * @throws Exception for any error
// */
// public void testGetMetaDataUnpackedJar() throws Exception
// {
// testGetMetaDataFromJar("unpacked-with-metadata.jar");
// }
//
// /**
// * Tests that we can find the META-INF/some-data.xml in a packed deployment
// *
// * @throws Exception for any error
// */
// public void testGetMetaDataPackedJar() throws Exception
// {
// testGetMetaDataFromJar("with-metadata.jar");
// }
// private void testGetMetaDataFromJar(String name) throws Exception
// {
// URL rootURL = getResource("/vfs/test");
// VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
//
// VirtualFile jar = testdir.getChild(name);
// assertNotNull(jar);
// VirtualFile metadataLocation = jar.getChild("META-INF");
// assertNotNull(metadataLocation);
//
// VirtualFile metadataByName = metadataLocation.getChild("some-data.xml");
// assertNotNull(metadataByName);
//
// //This is the same code as is called by AbstractDeploymentContext.getMetaDataFiles(String name, String suffix).
// //The MetaDataMatchFilter is a copy of the one used there
// List<VirtualFile> metaDataList = metadataLocation.getChildren(new MetaDataMatchFilter(null, "-data.xml"));
// assertNotNull(metaDataList);
// assertEquals("Wrong size", 1, metaDataList.size());
// }
/**
    * Validate that URLClassLoader.findResource/getResourceAsStream calls for non-existent absolute
    * resources fail as expected with null results. Related to JBMICROCONT-139.
*
* @throws Exception
*/
public void testURLClassLoaderFindResourceFailure() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
URL[] cp = { testdir.toURL() };
URLClassLoader ucl = new URLClassLoader(cp);
// Search for a non-existent resource
URL qp = ucl.findResource("nosuch-quartz.props");
assertNull("findResource(nosuch-quartz.props)", qp);
InputStream is = ucl.getResourceAsStream("nosuch-quartz.props");
assertNull("getResourceAsStream(nosuch-quartz.props)", is);
}
/**
* Test VirtualFile.exists for vfsfile based urls.
*
* @throws Exception
*/
public void testFileExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testFileExists", null, tmpRoot);
log.info("+++ testFileExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue("tmp.delete()", tmpVF.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsfile based urls for a directory.
*
* @throws Exception
*/
public void testDirFileExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testFileExists", null, tmpRoot);
assertTrue(tmp + ".delete()", tmp.delete());
assertTrue(tmp + ".mkdir()", tmp.mkdir());
log.info("+++ testDirFileExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertFalse(tmpVF.getPathName() + ".isLeaf()", tmpVF.isLeaf());
assertTrue(tmp + ".delete()", tmp.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsjar based urls.
*
* @throws Exception
*/
public void testJarExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmpJar = File.createTempFile("testJarExists", ".jar", tmpRoot);
log.info("+++ testJarExists, tmpJar=" + tmpJar.getCanonicalPath());
Manifest mf = new Manifest();
mf.getMainAttributes().putValue("Created-By", "FileVFSUnitTestCase.testJarExists");
FileOutputStream fos = new FileOutputStream(tmpJar);
JarOutputStream jos = new JarOutputStream(fos, mf);
jos.setComment("testJarExists");
jos.setLevel(0);
jos.close();
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmpJar.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpVF.getPathName() + ".size() > 0", tmpVF.getSize() > 0);
assertTrue("tmp.delete()", tmpVF.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsjar based urls for a directory.
*
* @throws Exception
*/
public void testDirJarExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testDirJarExists", ".jar", tmpRoot);
assertTrue(tmp + ".delete()", tmp.delete());
assertTrue(tmp + ".mkdir()", tmp.mkdir());
log.info("+++ testDirJarExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
log.info(tmpVF);
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertFalse(tmpVF.getPathName() + ".isLeaf()", tmpVF.isLeaf());
assertTrue(tmp + ".delete()", tmp.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.delete() for file based urls
*
* @throws Exception
*/
public void testFileDelete() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
VirtualFile root = VFS.getInstance().getChild(tmpRoot.getPath());
      // non-existent directory - exists() should be false
      tmpRoot.delete();
      assertFalse(tmpRoot + ".exists() == false", root.exists());
      // existing directory - exists(), delete()
      tmpRoot.mkdir();
      assertTrue(tmpRoot + ".exists()", root.exists());
assertTrue(tmpRoot + ".delete()", root.delete());
tmpRoot.mkdir();
// non-empty directory - delete()
File tmp = new File(tmpRoot, "testFileDelete.jar");
assertTrue(tmp.mkdir());
File tmp2 = File.createTempFile("testFileDelete2", ".jar", tmp);
assertTrue(tmp2.exists());
VirtualFile tmpDeletable = VFS.getRoot(tmp.toURI());
assertFalse(tmpRoot + ".delete() == false", tmpDeletable.delete());
// children() exist
List<VirtualFile> children = root.getChildren();
assertEquals(tmpRoot + ".getChildren().size() == 1", 1, children.size());
// specific child exists(), delete(), exists() not
VirtualFile tmpVF = root.getChild(tmp.getName());
assertTrue(tmp + ".exists()", tmpVF.exists());
assertTrue(tmp + ".delete()", tmp2.delete());
assertTrue(tmp + ".delete()", tmpVF.delete());
assertFalse(tmp + ".exists() == false", tmpVF.exists());
// children() don't exist
children = root.getChildren();
assertTrue(tmpRoot + ".getChildren().size() == 0", children.size() == 0);
// directory delete()
assertTrue(tmpRoot + ".delete()", root.delete());
}
/**
* Test for <em>caseSensitive=true</em>
*
* If this test passes on unixes, it doesn't mean much, because there it should pass without
* case sensitivity turned on as well.
*
* If it passes on windows, it means the functionality works as expected.
*
* @throws Exception for any error
*/
// public void testCaseSensitive() throws Exception
// {
// URL rootURL = getResource("/vfs");
//
// FileSystemContext ctx = new FileSystemContext(new URL(rootURL.toString() + "?caseSensitive=true"));
// VirtualFileHandler root = ctx.getRoot();
//
// String path = "context/file/simple/child";
// VirtualFileHandler child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/file/simple/CHILD";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.jar";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/JAR/archive.jar";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.JAR";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.jar/child";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/jar/archive.jar/CHILD";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
// }
}
| src/test/java/org/jboss/test/vfs/FileVFSUnitTestCase.java | /*
* JBoss, Home of Professional Open Source
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.test.vfs;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.jboss.test.vfs.support.ClassPathIterator;
import org.jboss.test.vfs.support.ClassPathIterator.ClassPathEntry;
import org.jboss.vfs.TempFileProvider;
import org.jboss.vfs.VFS;
import org.jboss.vfs.VFSUtils;
import org.jboss.vfs.VirtualFile;
import org.jboss.vfs.VisitorAttributes;
import org.jboss.vfs.util.SuffixMatchFilter;
/**
* Tests of the VFS implementation
*
* @author [email protected]
* @author [email protected]
* @version $Revision$
*/
public class FileVFSUnitTestCase extends AbstractVFSTest
{
private TempFileProvider provider;
public FileVFSUnitTestCase(String name)
{
super(name);
}
public void setUp() throws Exception
{
super.setUp();
provider = TempFileProvider.create("test", new ScheduledThreadPoolExecutor(2));
}
public void tearDown() throws Exception
{
provider.close();
}
public static Test suite()
{
return new TestSuite(FileVFSUnitTestCase.class);
}
public List<Closeable> recursiveMount(VirtualFile file) throws IOException
{
ArrayList<Closeable> mounts = new ArrayList<Closeable>();
if (!file.isDirectory() && file.getName().matches("^.*\\.[EeWwJj][Aa][Rr]$"))
mounts.add(VFS.mountZip(file, file, provider));
if (file.isDirectory())
for (VirtualFile child : file.getChildren())
mounts.addAll(recursiveMount(child));
return mounts;
}
/**
* Test that one can go from a file uri to VirtualFile and obtain the
* same VirtualFile using VirtualFile vfsfile uri
* @throws Exception
*/
public void testVFSFileURIFactory() throws Exception
{
URL rootURL = getClass().getProtectionDomain().getCodeSource().getLocation();
VFS rootVFS = VFS.getInstance();
VirtualFile root0 = rootVFS.getChild(rootURL.getPath());
VirtualFile root1 = rootVFS.getChild(root0.toURI().getPath());
assertEquals(root0, root1);
}
/**
* Test reading the contents of nested jar entries.
* @throws Exception
*/
public void testInnerJarFile() throws Exception
{
URL rootURL = getResource("/vfs/test");
VFS vfs = VFS.getInstance();
VirtualFile testdir = vfs.getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertTrue("outer.jar != null", outerjar != null);
VirtualFile jar1 = outerjar.getChild("jar1.jar");
assertTrue("outer.jar/jar1.jar != null", jar1 != null);
VirtualFile jar2 = outerjar.getChild("jar2.jar");
assertTrue("outer.jar/jar2.jar != null", jar2 != null);
VirtualFile jar1MF = jar1.getChild("META-INF/MANIFEST.MF");
assertNotNull("jar1!/META-INF/MANIFEST.MF", jar1MF);
InputStream mfIS = jar1MF.openStream();
Manifest mf1 = new Manifest(mfIS);
Attributes mainAttrs1 = mf1.getMainAttributes();
String title1 = mainAttrs1.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1", title1);
mfIS.close();
VirtualFile jar2MF = jar2.getChild("META-INF/MANIFEST.MF");
assertNotNull("jar2!/META-INF/MANIFEST.MF", jar2MF);
InputStream mfIS2 = jar2MF.openStream();
Manifest mf2 = new Manifest(mfIS2);
Attributes mainAttrs2 = mf2.getMainAttributes();
String title2 = mainAttrs2.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar2", title2);
mfIS2.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar
* @throws Exception
*/
public void testFindResource() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("outer.jar != null", jar != null);
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar
* @throws Exception
*/
public void testFindResourceUsingURLStream() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("outer.jar != null", jar != null);
/*
ArrayList<String> searchCtx = new ArrayList<String>();
searchCtx.add("outer.jar");
VirtualFile metaInf = vfs.resolveFile("META-INF/MANIFEST.MF", searchCtx);
*/
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
System.err.println(metaInf.toURL());
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
String urlString = metaInf.toURL().toString();
URL mfURL = new URL(urlString);
mfIS = mfURL.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
mf = new Manifest(mfIS);
mainAttrs = mf.getMainAttributes();
version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a jar that does not
* have parent directory entries.
* @throws Exception
*/
public void testFindResourceInFilesOnlyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("jar1-filesonly.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
assertTrue("jar1-filesonly.jar != null", jar != null);
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1-filesonly", title);
mfIS.close();
String urlString = metaInf.toURL().toString();
URL mfURL = new URL(urlString);
mfIS = mfURL.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
mf = new Manifest(mfIS);
mainAttrs = mf.getMainAttributes();
version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("jar1-filesonly", title);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Basic tests of accessing resources in a war that does not
* have parent directory entries.
* @throws Exception
*/
public void testFindResourceInFilesOnlyWar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile war2 = testdir.getChild("WarDeployApp_web.war");
List<Closeable> mounts = recursiveMount(war2);
try
{
assertTrue("WarDeployApp_web.war != null", war2 != null);
VirtualFile classes2 = war2.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes2 != null);
assertTrue("WEB-INF/classes is not a leaf", classes2.isLeaf() == false);
classes2 = war2.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes2 != null);
assertTrue("WEB-INF/classes is not a leaf", classes2.isLeaf() == false);
VirtualFile HelloJavaBean = classes2.getChild("com/sun/ts/tests/webservices/deploy/warDeploy/HelloJavaBean.class");
assertTrue("HelloJavaBean.class != null", HelloJavaBean != null);
assertTrue("HelloJavaBean.class is a leaf", HelloJavaBean.isLeaf());
VirtualFile war = testdir.getChild("filesonly.war");
mounts.addAll(recursiveMount(war));
assertTrue("filesonly.war != null", war != null);
VirtualFile classes = war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
assertTrue("WEB-INF/classes is not a directory", classes.isDirectory());
VirtualFile jar1 = war.getChild("WEB-INF/lib/jar1.jar");
assertTrue("WEB-INF/lib/jar1.jar != null", jar1 != null);
assertTrue("WEB-INF/lib/jar1.jar is not a leaf", jar1.isLeaf() == false);
VirtualFile ClassInJar1 = jar1.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1.class");
assertTrue("ClassInJar1.class != null", ClassInJar1 != null);
assertTrue("ClassInJar1.class is a leaf", ClassInJar1.isLeaf());
VirtualFile metaInf = war.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.toURL().openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals("filesonly-war", title);
mfIS.close();
war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
assertTrue("WEB-INF/classes is not a leaf", classes.isLeaf() == false);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate iterating over a vfs url from a files only war.
*
* @throws Exception
*/
public void testFindClassesInFilesOnlyWar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile war = testdir.getChild("filesonly.war");
List<Closeable> mounts = recursiveMount(war);
try
{
assertTrue("filesonly.war != null", war != null);
VirtualFile classes = war.getChild("WEB-INF/classes");
assertTrue("WEB-INF/classes != null", classes != null);
HashSet<String> names = new HashSet<String>();
ClassPathIterator iter = new ClassPathIterator(classes.toURL());
ClassPathEntry entry = null;
while ((entry = iter.getNextEntry()) != null)
{
names.add(entry.name);
}
log.debug(names);
assertTrue("org/jboss/test/vfs/support/jar1", names.contains("org/jboss/test/vfs/support/jar1"));
assertTrue("ClassInJar1.class", names.contains("org/jboss/test/vfs/support/jar1/ClassInJar1.class"));
assertTrue("ClassInJar1$InnerClass.class", names.contains("org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
public void testFindResourceUnpackedJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("unpacked-outer.jar");
assertTrue("unpacked-outer.jar != null", jar != null);
/*
ArrayList<String> searchCtx = new ArrayList<String>();
searchCtx.add("unpacked-outer.jar");
VirtualFile metaInf = vfs.resolveFile("META-INF/MANIFEST.MF", searchCtx);
*/
VirtualFile metaInf = jar.getChild("META-INF/MANIFEST.MF");
assertTrue("META-INF/MANIFEST.MF != null", metaInf != null);
InputStream mfIS = metaInf.openStream();
assertTrue("META-INF/MANIFEST.MF.openStream != null", mfIS != null);
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_VERSION);
assertEquals("1.0.0.GA", version);
mfIS.close();
}
/**
* Test simple file resolution without search contexts
* @throws Exception
*/
public void testResolveFile() throws Exception
{
log.info("+++ testResolveFile, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Check resolving the root file
VirtualFile root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerJar);
try
{
assertNotNull("outer.jar", outerJar);
assertEquals("outer.jar name", "outer.jar", outerJar.getName());
assertEquals("outer.jar path", rootURL.getPath() + "/outer.jar", outerJar.getPathName());
VirtualFile outerJarMF = testdir.getChild("outer.jar/META-INF/MANIFEST.MF");
assertNotNull("outer.jar/META-INF/MANIFEST.MF", outerJarMF);
// Test a non-canonical path
rootURL = getResource("/vfs/sundry/../test");
// Check resolving the root file
root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate resolving a .class file given a set of search contexts in the
* vfs that make up a classpath.
*
* @throws Exception
*/
public void testResolveClassFileInClassPath() throws Exception
{
log.info("+++ testResolveFile, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Find ClassInJar1.class
VirtualFile vf = testdir.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(vf);
try
{
VirtualFile c1 = vf.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1.class");
assertNotNull("ClassInJar1.class VF", c1);
log.debug("Found ClassInJar1.class: " + c1);
// Find ClassInJar1$InnerClass.class
VirtualFile c1i = vf.getChild("org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
assertNotNull("ClassInJar1$InnerClass.class VF", c1i);
log.debug("Found ClassInJar1$InnerClass.class: " + c1i);
// Find ClassInJar2.class
vf = testdir.getChild("jar2.jar");
mounts.addAll(recursiveMount(vf));
VirtualFile c2 = vf.getChild("org/jboss/test/vfs/support/jar2/ClassInJar2.class");
assertNotNull("ClassInJar2.class VF", c2);
log.debug("Found ClassInJar2.class: " + c2);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
public void testResolveFileInUnpackedJar() throws Exception
{
log.info("+++ testResolveFileInUnpackedJar, cwd=" + (new File(".").getCanonicalPath()));
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Check resolving the root file
VirtualFile root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
assertNotNull("unpacked-outer.jar", outerJar);
assertEquals("unpacked-outer.jar name", "unpacked-outer.jar", outerJar.getName());
assertEquals("unpacked-outer.jar path", rootURL.getPath() + "/unpacked-outer.jar", outerJar.getPathName());
VirtualFile outerJarMF = testdir.getChild("unpacked-outer.jar/META-INF/MANIFEST.MF");
assertNotNull("unpacked-outer.jar/META-INF/MANIFEST.MF", outerJarMF);
// Check resolving the root file
root = testdir.getChild("");
assertEquals("root name", "test", root.getName());
assertEquals("root path", rootURL.getPath(), root.getPathName());
assertFalse("root isDirectory", root.isLeaf());
}
public void testFileNotFoundInUnpackedJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
// Find the outer.jar
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
assertNotNull("unpacked-outer.jar", outerJar);
assertNull(outerJar.getChild("WEB-INF"));
}
public void testNestedNestedParent() throws Exception
{
// TODO
}
public void testCopyNestedStream() throws Exception
{
// TODO
}
/**
* Test file resolution with nested jars
* @throws Exception
*/
public void testInnerJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = testdir.getChild("outer.jar/jar1.jar");
log.info("IsFile: " + inner.isLeaf());
log.info(inner.getLastModified());
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
assertTrue("jar1.jar children.length(" + contents.size() + ") >= 2", contents.size() >= 2);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
public void testInnerJarUsingURLStream() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = testdir.getChild("outer.jar/jar1.jar");
log.info("IsFile: " + inner.isLeaf());
log.info(inner.getLastModified());
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
assertTrue("jar1.jar children.length(" + contents.size() + ") >= 2", contents.size() >= 2);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.toURL().openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the outer.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScan() throws Exception
{
URL rootURL = getResource("/vfs/test/outer.jar");
VirtualFile outer = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(outer);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(outer.getPathName() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(outer.getPathName() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(outer.getPathName() + "/jar1-filesonly.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(outer.getPathName() + "/jar1-filesonly.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(outer.getPathName() + "/jar2.jar/org/jboss/test/vfs/support/jar2/ClassInJar2.class");
expectedClasses.add(outer.getPathName() + "/org/jboss/test/vfs/support/CommonClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
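// SuffixMatchFilter with VisitorAttributes.RECURSE should walk the whole tree, including the
// mounted nested jars, and return only entries whose names end in ".class".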
List<VirtualFile> classes = outer.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 6 classes", 6, count);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the unpacked-outer.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScanUnpacked() throws Exception
{
URL rootURL = getResource("/vfs/test/unpacked-outer.jar");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(testdir);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(rootURL.getPath() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(rootURL.getPath() + "/jar1.jar/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
expectedClasses.add(rootURL.getPath() + "/jar2.jar/org/jboss/test/vfs/support/jar2/ClassInJar2.class");
// FIXME: .class files are not being copied from the resources directory
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/CommonClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
List<VirtualFile> classes = testdir.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 4 classes", 4, count);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test a scan of the jar1-filesonly.jar vfs to locate all .class files
* @throws Exception
*/
public void testClassScanFilesonly() throws Exception
{
URL rootURL = getResource("/vfs/test/jar1-filesonly.jar");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
List<Closeable> mounts = recursiveMount(testdir);
try
{
HashSet<String> expectedClasses = new HashSet<String>();
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/jar1/ClassInJar1.class");
expectedClasses.add(rootURL.getPath() + "/org/jboss/test/vfs/support/jar1/ClassInJar1$InnerClass.class");
super.enableTrace("org.jboss.vfs.util.SuffixMatchFilter");
SuffixMatchFilter classVisitor = new SuffixMatchFilter(".class", VisitorAttributes.RECURSE);
List<VirtualFile> classes = testdir.getChildren(classVisitor);
int count = 0;
for (VirtualFile cf : classes)
{
String path = cf.getPathName();
if (path.endsWith(".class"))
{
assertTrue(path, expectedClasses.contains(path));
count++;
}
}
assertEquals("There were 2 classes", 2, count);
// Make sure we can walk path-wise to the class
VirtualFile parent = testdir;
String className = "org/jboss/test/vfs/support/jar1/ClassInJar1.class";
VirtualFile classInJar1 = testdir.getChild(className);
String[] paths = className.split("/");
StringBuilder vfsPath = new StringBuilder();
for (String path : paths)
{
vfsPath.append(path);
VirtualFile vf = parent.getChild(path);
if (path.equals("ClassInJar1.class"))
assertEquals("ClassInJar1.class", classInJar1, vf);
else
{
assertEquals("vfsPath", testdir.getPathName() + "/" + vfsPath.toString(), vf.getPathName());
// why should this be equal?
// assertEquals("lastModified", classInJar1.getLastModified(), vf.getLastModified());
assertTrue("lastModified", classInJar1.getLastModified() <= vf.getLastModified());
}
vfsPath.append('/');
parent = vf;
}
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test access of directories in a jar that only stores files
* @throws Exception
*/
public void testFilesOnlyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("jar1-filesonly.jar");
List<Closeable> mounts = recursiveMount(jar);
try
{
VirtualFile metadataLocation = jar.getChild("META-INF");
assertNotNull(metadataLocation);
VirtualFile mfFile = metadataLocation.getChild("MANIFEST.MF");
assertNotNull(mfFile);
InputStream is = mfFile.openStream();
Manifest mf = new Manifest(is);
is.close();
String title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1-filesonly", title);
// Retry starting from the jar root
mfFile = jar.getChild("META-INF/MANIFEST.MF");
is = mfFile.openStream();
mf = new Manifest(is);
is.close();
title = mf.getMainAttributes().getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1-filesonly", title);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test the serialization of VirtualFiles
* @throws Exception
*/
public void testVFSerialization() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
tmpRoot.deleteOnExit();
File tmp = new File(tmpRoot, "vfs.ser");
tmp.createNewFile();
tmp.deleteOnExit();
log.info("+++ testVFSerialization, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild("vfs.ser");
FileOutputStream fos = new FileOutputStream(tmp);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(tmpVF);
oos.close();
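// Serializing the VirtualFile is assumed to capture only its location, so the copy read back
// below should re-resolve against the same underlying file and report identical attributes.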
// Check the tmpVF attributes against the tmp file
long lastModified = tmp.lastModified();
long size = tmp.length();
String name = tmp.getName();
String vfsPath = tmp.getPath();
URL url = tmp.toURI().toURL();
log.debug("name: " + name);
log.debug("vfsPath: " + vfsPath);
log.debug("url: " + url);
log.debug("lastModified: " + lastModified);
log.debug("size: " + size);
assertEquals("name", name, tmpVF.getName());
assertEquals("pathName", vfsPath, tmpVF.getPathName());
assertEquals("lastModified", lastModified, tmpVF.getLastModified());
assertEquals("size", size, tmpVF.getSize());
assertEquals("url", url, tmpVF.toURL());
assertEquals("isLeaf", true, tmpVF.isLeaf());
//assertEquals("isHidden", false, tmpVF.isHidden());
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(tmp);
ObjectInputStream ois = new ObjectInputStream(fis);
VirtualFile tmpVF2 = (VirtualFile)ois.readObject();
ois.close();
// Validate the deserialized attributes against the tmp file
assertEquals("name", name, tmpVF2.getName());
assertEquals("pathName", vfsPath, tmpVF2.getPathName());
assertEquals("lastModified", lastModified, tmpVF2.getLastModified());
assertEquals("size", size, tmpVF2.getSize());
assertEquals("url", url, tmpVF2.toURL());
assertEquals("isLeaf", true, tmpVF2.isLeaf());
//assertEquals("isHidden", false, tmpVF2.isHidden());
}
/**
* Test the serialization of VirtualFiles representing a jar
* @throws Exception
*/
public void testVFJarSerialization() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
tmpRoot.deleteOnExit();
// Create a test jar containing a txt file
File tmpJar = new File(tmpRoot, "tst.jar");
tmpJar.createNewFile();
tmpJar.deleteOnExit();
FileOutputStream fos = new FileOutputStream(tmpJar);
JarOutputStream jos = new JarOutputStream(fos);
// Write a text file to include in a test jar
JarEntry txtEntry = new JarEntry("tst.txt");
jos.putNextEntry(txtEntry);
txtEntry.setSize("testVFJarSerialization".length());
txtEntry.setTime(System.currentTimeMillis());
jos.write("testVFJarSerialization".getBytes());
jos.close();
log.info("+++ testVFJarSerialization, tmp=" + tmpJar.getCanonicalPath());
URI rootURI = tmpRoot.toURI();
VirtualFile tmp = VFS.getInstance().getChild(rootURI.getPath());
File vfsSer = new File(tmpRoot, "vfs.ser");
vfsSer.createNewFile();
vfsSer.deleteOnExit();
VirtualFile tmpVF = tmp.getChild("tst.jar");
// Validate the vf jar against the tmp file attributes
long lastModified = tmpJar.lastModified();
long size = tmpJar.length();
String name = tmpJar.getName();
String vfsPath = tmpJar.getPath();
URL url = tmpJar.toURL();
//url = JarUtils.createJarURL(url);
log.debug("name: " + name);
log.debug("vfsPath: " + vfsPath);
log.debug("url: " + url);
log.debug("lastModified: " + lastModified);
log.debug("size: " + size);
assertEquals("name", name, tmpVF.getName());
assertEquals("pathName", vfsPath, tmpVF.getPathName());
assertEquals("lastModified", lastModified, tmpVF.getLastModified());
assertEquals("size", size, tmpVF.getSize());
assertEquals("url", url.getPath(), tmpVF.toURL().getPath());
// TODO: these should pass
assertEquals("isDirectory", false, tmpVF.isDirectory());
//assertEquals("isHidden", false, tmpVF.isHidden());
// Write out the vfs jar file
fos = new FileOutputStream(vfsSer);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(tmpVF);
oos.close();
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(vfsSer);
ObjectInputStream ois = new ObjectInputStream(fis);
VirtualFile tmpVF2 = (VirtualFile)ois.readObject();
ois.close();
// Validate the vf jar against the tmp file attributes
assertEquals("name", name, tmpVF2.getName());
assertEquals("pathName", vfsPath, tmpVF2.getPathName());
assertEquals("lastModified", lastModified, tmpVF2.getLastModified());
assertEquals("size", size, tmpVF2.getSize());
assertEquals("url", url.getPath(), tmpVF2.toURL().getPath());
// TODO: these should pass
assertEquals("isDirectory", false, tmpVF2.isDirectory());
//assertEquals("isHidden", false, tmpVF2.isHidden());
}
/**
* Test the serialization of VirtualFiles representing a jar
* @throws Exception
*/
public void testVFNestedJarSerialization() throws Exception
{
// this expects to be run with a working dir of the container root
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outer = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outer);
try
{
VirtualFile inner = outer.getChild("jar1.jar");
File vfsSer = File.createTempFile("testVFNestedJarSerialization", ".ser");
vfsSer.deleteOnExit();
// Write out the vfs inner jar file
FileOutputStream fos = new FileOutputStream(vfsSer);
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(inner);
oos.close();
// Read in the VF from the serialized file
FileInputStream fis = new FileInputStream(vfsSer);
ObjectInputStream ois = new ObjectInputStream(fis);
inner = (VirtualFile)ois.readObject();
ois.close();
List<VirtualFile> contents = inner.getChildren();
// META-INF/*, org/jboss/test/vfs/support/jar1/* at least
// TODO - fix this once no_copy serialization is working
int size = 2;
assertTrue("jar1.jar children.length(" + contents.size() + ") is not " + size, contents.size() >= size);
for (VirtualFile vf : contents)
{
log.info(" " + vf.getName());
}
VirtualFile vf = testdir.getChild("outer.jar/jar1.jar");
/*
VirtualFile jar1MF = vf.getChild("META-INF/MANIFEST.MF");
InputStream mfIS = jar1MF.openStream();
Manifest mf = new Manifest(mfIS);
Attributes mainAttrs = mf.getMainAttributes();
String version = mainAttrs.getValue(Attributes.Name.SPECIFICATION_TITLE);
assertEquals(Attributes.Name.SPECIFICATION_TITLE.toString(), "jar1", version);
mfIS.close();
*/
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the URL of a VFS corresponding to a directory ends in '/' so that
* URLs created relative to it are under the directory. This requires that
* build-test.xml artifacts exist.
*
* @throws Exception
*/
public void testDirURLs() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
URL outerURL = outerJar.toURL();
log.debug("outerURL: " + outerURL);
assertTrue(outerURL + " ends in '/'", outerURL.getPath().endsWith("/"));
// Validate that jar1 is under unpacked-outer.jar
URL jar1URL = new URL(outerURL, "jar1.jar/");
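// java.net.URL resolves a relative spec by appending it when the base path ends in '/'; if
// outerURL did not end in '/', "jar1.jar/" would replace the last path segment instead of
// nesting under the directory, which is exactly what this test guards against.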
log.debug("jar1URL: " + jar1URL + ", path=" + jar1URL.getPath());
assertTrue("jar1URL path ends in unpacked-outer.jar/jar1.jar!/", jar1URL.getPath().endsWith("unpacked-outer.jar/jar1.jar/"));
VirtualFile jar1 = outerJar.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(jar1);
try
{
assertEquals(jar1URL.getPath(), jar1.toURL().getPath());
VirtualFile packedJar = testdir.getChild("jar1.jar");
mounts.addAll(recursiveMount(packedJar));
jar1URL = packedJar.getChild("org/jboss/test/vfs/support").toURL();
assertTrue("Jar directory entry URLs must end in /: " + jar1URL.toString(), jar1URL.toString().endsWith("/"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the URI of a VFS corresponding to a directory ends in '/' so that
* URIs created relative to it are under the directory. This requires that
* build-test.xml artifacts exist.
*
* @throws Exception
*/
public void testDirURIs() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
URI outerURI = outerJar.toURI();
log.debug("outerURI: " + outerURI);
assertTrue(outerURI + " ends in '/'", outerURI.getPath().endsWith("/"));
// Validate that jar1 is under unpacked-outer.jar
URI jar1URI = new URI(outerURI + "jar1.jar/");
log.debug("jar1URI: " + jar1URI + ", path=" + jar1URI.getPath());
assertTrue("jar1URI path ends in unpacked-outer.jar/jar1.jar!/", jar1URI.getPath().endsWith("unpacked-outer.jar/jar1.jar/"));
VirtualFile jar1 = outerJar.getChild("jar1.jar");
List<Closeable> mounts = recursiveMount(jar1);
try
{
assertEquals(jar1URI.getPath(), jar1.toURI().getPath());
VirtualFile packedJar = testdir.getChild("jar1.jar");
mounts.addAll(recursiveMount(packedJar));
jar1URI = packedJar.getChild("org/jboss/test/vfs/support").toURI();
assertTrue("Jar directory entry URLs must end in /: " + jar1URI.toString(), jar1URI.toString().endsWith("/"));
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test copying a jar
*
* @throws Exception
*/
public void testCopyJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile jar = testdir.getChild("outer.jar");
assertTrue("outer.jar != null", jar != null);
File tmpJar = File.createTempFile("testCopyJar", ".jar");
tmpJar.deleteOnExit();
try
{
InputStream is = jar.openStream();
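// openStream() on the (unmounted) jar is expected to return the raw bytes of the jar file
// itself, which is why the copied file's length should equal jar.getSize() below.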
FileOutputStream fos = new FileOutputStream(tmpJar);
byte[] buffer = new byte[1024];
int read;
while ((read = is.read(buffer)) > 0)
{
fos.write(buffer, 0, read);
}
fos.close();
log.debug("outer.jar size is: " + jar.getSize());
log.debug(tmpJar.getAbsolutePath() + " size is: " + tmpJar.length());
assertTrue("outer.jar > 0", jar.getSize() > 0);
assertEquals("copy jar size", jar.getSize(), tmpJar.length());
is.close();
}
finally
{
try
{
tmpJar.delete();
}
catch (Exception ignore)
{
}
}
}
/**
* Test copying a jar that is nested in another jar.
*
* @throws Exception
*/
public void testCopyInnerJar() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outer.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertTrue("outer.jar != null", outerjar != null);
VirtualFile jar = outerjar.getChild("jar1.jar");
assertTrue("outer.jar/jar1.jar != null", jar != null);
File tmpJar = File.createTempFile("testCopyInnerJar", ".jar");
tmpJar.deleteOnExit();
try
{
InputStream is = jar.openStream();
FileOutputStream fos = new FileOutputStream(tmpJar);
byte[] buffer = new byte[1024];
int read;
while ((read = is.read(buffer)) > 0)
{
fos.write(buffer, 0, read);
}
fos.close();
log.debug("outer.jar/jar1.jar size is: " + jar.getSize());
log.debug(tmpJar.getAbsolutePath() + " size is: " + tmpJar.length());
assertTrue("outer.jar > 0", jar.getSize() > 0);
assertEquals("copy jar size", jar.getSize(), tmpJar.length());
is.close();
}
finally
{
try
{
tmpJar.delete();
}
catch (Exception ignore)
{
}
}
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that the outermf.jar manifest classpath is parsed
* correctly.
*
* @throws Exception
*/
public void testManifestClasspath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("outermf.jar");
List<Closeable> mounts = recursiveMount(outerjar);
try
{
assertNotNull("outermf.jar != null", outerjar);
ArrayList<VirtualFile> cp = new ArrayList<VirtualFile>();
VFSUtils.addManifestLocations(outerjar, cp);
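// VFSUtils.addManifestLocations is assumed to read the jar's META-INF/MANIFEST.MF Class-Path
// attribute and resolve each entry relative to the jar's parent, appending the resulting
// VirtualFiles to cp.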
// The p0.jar should be found in the classpath
assertEquals("cp size 2", 2, cp.size());
assertEquals("jar1.jar == cp[0]", "jar1.jar", cp.get(0).getName());
assertEquals("jar2.jar == cp[1]", "jar2.jar", cp.get(1).getName());
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Test that an inner-inner jar that is extracted does not blow up
* the addManifestLocations routine.
*
* @throws Exception
*/
public void testInnerManifestClasspath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile outerjar = testdir.getChild("withalong/rootprefix/outermf.jar");
assertNotNull(outerjar);
List<Closeable> mounts = recursiveMount(outerjar);
try
{
VirtualFile jar1 = outerjar.getChild("jar1.jar");
assertNotNull(jar1);
VirtualFile jar2 = outerjar.getChild("jar2.jar");
assertNotNull(jar2);
VirtualFile innerjar = outerjar.getChild("innermf.jar");
assertNotNull("innermf.jar != null", innerjar);
ArrayList<VirtualFile> cp = new ArrayList<VirtualFile>();
VFSUtils.addManifestLocations(innerjar, cp);
assertEquals(2, cp.size());
VirtualFile cp0 = cp.get(0);
assertEquals(jar1, cp0);
VirtualFile cp1 = cp.get(1);
assertEquals(jar2, cp1);
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate accessing a packed jar vf and its uri when the vfs path
* contains spaces
* @throws Exception
*/
public void testJarWithSpacesInPath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tstjar = testdir.getChild("path with spaces/tst.jar");
List<Closeable> mounts = recursiveMount(tstjar);
try
{
assertNotNull("tstjar != null", tstjar);
URI uri = tstjar.toURI();
URI expectedURI = new URI("vfs" + rootURL.toString() + "/path%20with%20spaces/tst.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
InputStream is = uri.toURL().openStream();
is.close();
tstjar = testdir.getChild("path with spaces/tst%20nospace.jar");
mounts.addAll(recursiveMount(tstjar));
assertNotNull("tstjar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/path%20with%20spaces/tst%2520nospace.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
is = uri.toURL().openStream();
is.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
public void testJarWithSpacesInContext() throws Exception
{
URL rootURL = getResource("/vfs/test/path with spaces");
VirtualFile testdir = VFS.getInstance().getChild(URLDecoder.decode(rootURL.getPath(), "UTF-8"));
VirtualFile tstear = testdir.getChild("spaces.ear");
List<Closeable> mounts = recursiveMount(tstear);
try
{
assertNotNull("spaces.ear != null", tstear);
assertTrue(tstear.isDirectory());
URI uri = tstear.toURI();
URI expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/");
assertEquals(expectedURI.getPath(), uri.getPath());
InputStream is = uri.toURL().openStream();
is.close();
VirtualFile tstjar = tstear.getChild("spaces-ejb.jar");
assertNotNull("spaces-ejb.jar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/spaces-ejb.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
assertFalse(tstjar.isLeaf());
is = uri.toURL().openStream();
is.close();
tstjar = tstear.getChild("spaces-lib.jar");
assertNotNull("spaces-lib.jar != null", tstjar);
uri = tstjar.toURI();
expectedURI = new URI("vfs" + rootURL.toString() + "/spaces.ear/spaces-lib.jar/");
assertEquals(expectedURI.getPath(), uri.getPath());
assertFalse(tstjar.isLeaf());
is = uri.toURL().openStream();
is.close();
}
finally
{
VFSUtils.safeClose(mounts);
}
}
/**
* Validate accessing an unpacked jar vf and its uri when the vfs path
* contains spaces
* @throws Exception
*/
public void testUnpackedJarWithSpacesInPath() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tstjar = testdir.getChild("path with spaces/unpacked-tst.jar");
assertNotNull("tstjar != null", tstjar);
URI uri = tstjar.toURI();
URI expectedURI = new URI(rootURL.toString() + "/path%20with%20spaces/unpacked-tst.jar/");
assertEquals(uri, expectedURI);
}
// /**
// * Tests that we can find the META-INF/some-data.xml in an unpacked deployment
// *
// * @throws Exception for any error
// */
// public void testGetMetaDataUnpackedJar() throws Exception
// {
// testGetMetaDataFromJar("unpacked-with-metadata.jar");
// }
//
// /**
// * Tests that we can find the META-INF/some-data.xml in a packed deployment
// *
// * @throws Exception for any error
// */
// public void testGetMetaDataPackedJar() throws Exception
// {
// testGetMetaDataFromJar("with-metadata.jar");
// }
// private void testGetMetaDataFromJar(String name) throws Exception
// {
// URL rootURL = getResource("/vfs/test");
// VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
//
// VirtualFile jar = testdir.getChild(name);
// assertNotNull(jar);
// VirtualFile metadataLocation = jar.getChild("META-INF");
// assertNotNull(metadataLocation);
//
// VirtualFile metadataByName = metadataLocation.getChild("some-data.xml");
// assertNotNull(metadataByName);
//
// //This is the same code as is called by AbstractDeploymentContext.getMetaDataFiles(String name, String suffix).
// //The MetaDataMatchFilter is a copy of the one used there
// List<VirtualFile> metaDataList = metadataLocation.getChildren(new MetaDataMatchFilter(null, "-data.xml"));
// assertNotNull(metaDataList);
// assertEquals("Wrong size", 1, metaDataList.size());
// }
/**
* Validate that URLClassLoader.findResource/getResourceAsStream calls for non-existent absolute
* resources fail as expected with null results. Related to JBMICROCONT-139.
*
* @throws Exception
*/
public void testURLClassLoaderFindResourceFailure() throws Exception
{
URL rootURL = getResource("/vfs/test");
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
URL[] cp = { testdir.toURL() };
URLClassLoader ucl = new URLClassLoader(cp);
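// Both lookups must return null rather than throw for a resource that does not exist; a vfs
// URL on the classpath should not change that contract.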
// Search for a non-existent resource
URL qp = ucl.findResource("nosuch-quartz.props");
assertNull("findResource(nosuch-quartz.props)", qp);
InputStream is = ucl.getResourceAsStream("nosuch-quartz.props");
assertNull("getResourceAsStream(nosuch-quartz.props)", is);
}
/**
* Test VirtualFile.exists for vfsfile based urls.
*
* @throws Exception
*/
public void testFileExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testFileExists", null, tmpRoot);
log.info("+++ testFileExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue("tmp.delete()", tmpVF.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsfile based urls for a directory.
*
* @throws Exception
*/
public void testDirFileExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testFileExists", null, tmpRoot);
assertTrue(tmp + ".delete()", tmp.delete());
assertTrue(tmp + ".mkdir()", tmp.mkdir());
log.info("+++ testDirFileExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertFalse(tmpVF.getPathName() + ".isLeaf()", tmpVF.isLeaf());
assertTrue(tmp + ".delete()", tmp.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsjar based urls.
*
* @throws Exception
*/
public void testJarExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmpJar = File.createTempFile("testJarExists", ".jar", tmpRoot);
log.info("+++ testJarExists, tmpJar=" + tmpJar.getCanonicalPath());
Manifest mf = new Manifest();
mf.getMainAttributes().putValue("Created-By", "FileVFSUnitTestCase.testJarExists");
FileOutputStream fos = new FileOutputStream(tmpJar);
JarOutputStream jos = new JarOutputStream(fos, mf);
jos.setComment("testJarExists");
jos.setLevel(0);
jos.close();
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmpJar.getName());
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpVF.getPathName() + ".size() > 0", tmpVF.getSize() > 0);
assertTrue("tmp.delete()", tmpVF.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.exists for vfsjar based urls for a directory.
*
* @throws Exception
*/
public void testDirJarExists() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
tmpRoot.delete();
tmpRoot.mkdir();
File tmp = File.createTempFile("testDirJarExists", ".jar", tmpRoot);
assertTrue(tmp + ".delete()", tmp.delete());
assertTrue(tmp + ".mkdir()", tmp.mkdir());
log.info("+++ testDirJarExists, tmp=" + tmp.getCanonicalPath());
URL rootURL = tmpRoot.toURI().toURL();
VirtualFile testdir = VFS.getInstance().getChild(rootURL.getPath());
VirtualFile tmpVF = testdir.getChild(tmp.getName());
log.info(tmpVF);
assertTrue(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertFalse(tmpVF.getPathName() + ".isLeaf()", tmpVF.isLeaf());
assertTrue(tmp + ".delete()", tmp.delete());
assertFalse(tmpVF.getPathName() + ".exists()", tmpVF.exists());
assertTrue(tmpRoot + ".delete()", tmpRoot.delete());
}
/**
* Test VirtualFile.delete() for file based urls
*
* @throws Exception
*/
public void testFileDelete() throws Exception
{
File tmpRoot = File.createTempFile("vfs", ".root");
VirtualFile root = VFS.getInstance().getChild(tmpRoot.getPath());
// non-existent directory - exists() should be false
tmpRoot.delete();
assertFalse(tmpRoot + ".exists() == false", root.exists());
// existing directory - exists(), delete()
tmpRoot.mkdir();
assertTrue(tmpRoot + ".exists()", root.exists());
assertTrue(tmpRoot + ".delete()", root.delete());
tmpRoot.mkdir();
// non-empty directory - delete()
File tmp = new File(tmpRoot, "testFileDelete.jar");
assertTrue(tmp.mkdir());
File tmp2 = File.createTempFile("testFileDelete2", ".jar", tmp);
assertTrue(tmp2.exists());
VirtualFile tmpDeletable = VFS.getRoot(tmp.toURI());
assertFalse(tmpRoot + ".delete() == false", tmpDeletable.delete());
// children() exist
List<VirtualFile> children = root.getChildren();
assertEquals(tmpRoot + ".getChildren().size() == 1", 1, children.size());
// specific child exists(), delete(), exists() not
VirtualFile tmpVF = root.getChild(tmp.getName());
assertTrue(tmp + ".exists()", tmpVF.exists());
assertTrue(tmp + ".delete()", tmp2.delete());
assertTrue(tmp + ".delete()", tmpVF.delete());
assertFalse(tmp + ".exists() == false", tmpVF.exists());
// children() don't exist
children = root.getChildren();
assertTrue(tmpRoot + ".getChildren().size() == 0", children.size() == 0);
// getChild() returns null
tmpVF = root.getChild(tmp.getName());
assertNull(tmpRoot + ".getChild('" + tmp.getName() + "') == null", tmpVF);
// directory delete()
assertTrue(tmpRoot + ".delete()", root.delete());
}
/**
* Test for <em>caseSensitive=true</em>
*
* If this test passes on unixes, it doesn't mean much, because there it should pass without
* case sensitivity turned on as well.
*
* If it passes on windows, it means the functionality works as expected.
*
* @throws Exception for any error
*/
// public void testCaseSensitive() throws Exception
// {
// URL rootURL = getResource("/vfs");
//
// FileSystemContext ctx = new FileSystemContext(new URL(rootURL.toString() + "?caseSensitive=true"));
// VirtualFileHandler root = ctx.getRoot();
//
// String path = "context/file/simple/child";
// VirtualFileHandler child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/file/simple/CHILD";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.jar";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/JAR/archive.jar";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.JAR";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
//
// path = "context/jar/archive.jar/child";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child != null);
//
// path = "context/jar/archive.jar/CHILD";
// child = root.getChild(path);
// assertTrue("getChild('" + path + "')", child == null);
// }
}
| Fix tests
| src/test/java/org/jboss/test/vfs/FileVFSUnitTestCase.java | Fix tests | <ide><path>rc/test/java/org/jboss/test/vfs/FileVFSUnitTestCase.java
<ide> // Find the outer.jar
<ide> VirtualFile outerJar = testdir.getChild("unpacked-outer.jar");
<ide> assertNotNull("unpacked-outer.jar", outerJar);
<del> assertNull(outerJar.getChild("WEB-INF"));
<add> assertFalse(outerJar.getChild("WEB-INF").exists());
<ide> }
<ide>
<ide> public void testNestedNestedParent() throws Exception
<ide> // children() don't exist
<ide> children = root.getChildren();
<ide> assertTrue(tmpRoot + ".getChildren().size() == 0", children.size() == 0);
<del>
<del> // getChild() returns null
<del> tmpVF = root.getChild(tmp.getName());
<del> assertNull(tmpRoot + ".getChild('" + tmp.getName() + "') == null", tmpVF);
<ide>
<ide> // directory delete()
<ide> assertTrue(tmpRoot + ".delete()", root.delete()); |
|
Java | apache-2.0 | error: pathspec 'jetty-server/src/test/java/org/eclipse/jetty/server/handler/RequestLogHandlerTest.java' did not match any file(s) known to git
| cb4cc12a75c0a9205a9dde81d8861d057a6f8b26 | 1 | xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read | //
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.server.handler;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.continuation.Continuation;
import org.eclipse.jetty.continuation.ContinuationListener;
import org.eclipse.jetty.continuation.Servlet3Continuation;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.RequestLog;
import org.eclipse.jetty.server.Response;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.toolchain.test.IO;
import org.eclipse.jetty.util.StringUtil;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
@Ignore
public class RequestLogHandlerTest
{
private static final Logger LOG = Log.getLogger(RequestLogHandlerTest.class);
public static class CaptureLog extends AbstractLifeCycle implements RequestLog
{
public List<String> captured = new ArrayList<String>();
public void log(Request request, Response response)
{
captured.add(String.format("%s %s %s %03d",request.getMethod(),request.getUri().toString(),request.getProtocol(),response.getStatus()));
}
}
private static abstract class AbstractTestHandler extends AbstractHandler
{
@Override
public String toString()
{
return this.getClass().getSimpleName();
}
}
private static class HelloHandler extends AbstractTestHandler
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
response.setContentType("text/plain");
response.getWriter().print("Hello World");
baseRequest.setHandled(true);
}
}
private static class ResponseSendErrorHandler extends AbstractTestHandler
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
response.sendError(500, "Whoops");
baseRequest.setHandled(true);
}
}
private static class ServletExceptionHandler extends AbstractTestHandler
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
throw new ServletException("Whoops");
}
}
private static class IOExceptionHandler extends AbstractTestHandler
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
throw new IOException("Whoops");
}
}
private static class RuntimeExceptionHandler extends AbstractTestHandler
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
throw new RuntimeException("Whoops");
}
}
private static class ContinuationOnTimeoutCompleteHandler extends AbstractTestHandler implements ContinuationListener
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
Continuation ac = new Servlet3Continuation(request);
ac.setTimeout(1000);
ac.addContinuationListener(this);
baseRequest.setHandled(true);
}
public void onComplete(Continuation continuation)
{
}
public void onTimeout(Continuation continuation)
{
continuation.complete();
}
}
private static class ContinuationOnTimeoutCompleteUnhandledHandler extends AbstractTestHandler implements ContinuationListener
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
Continuation ac = new Servlet3Continuation(request);
ac.setTimeout(1000);
ac.addContinuationListener(this);
}
public void onComplete(Continuation continuation)
{
}
public void onTimeout(Continuation continuation)
{
continuation.complete();
}
}
private static class ContinuationOnTimeoutRuntimeExceptionHandler extends AbstractTestHandler implements ContinuationListener
{
public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
Continuation ac = new Servlet3Continuation(request);
ac.setTimeout(1000);
ac.addContinuationListener(this);
baseRequest.setHandled(true);
}
public void onComplete(Continuation continuation)
{
}
public void onTimeout(Continuation continuation)
{
throw new RuntimeException("Ooops");
}
}
@Parameters(name="{0}")
public static List<Object[]> data()
{
List<Object[]> data = new ArrayList<Object[]>();
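// Each row supplies the handler under test, the request path, and the exact line CaptureLog is
// expected to record ("METHOD URI PROTOCOL STATUS").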
data.add(new Object[] { new HelloHandler(), "/test", "GET /test HTTP/1.1 200" });
data.add(new Object[] { new ContinuationOnTimeoutCompleteHandler(), "/test", "GET /test HTTP/1.1 200" });
data.add(new Object[] { new ContinuationOnTimeoutCompleteUnhandledHandler(), "/test", "GET /test HTTP/1.1 200" });
data.add(new Object[] { new ContinuationOnTimeoutRuntimeExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
data.add(new Object[] { new ResponseSendErrorHandler(), "/test", "GET /test HTTP/1.1 500" });
data.add(new Object[] { new ServletExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
data.add(new Object[] { new IOExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
data.add(new Object[] { new RuntimeExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
return data;
}
@Parameter(0)
public Handler testHandler;
@Parameter(1)
public String requestPath;
@Parameter(2)
public String expectedLogEntry;
@Test(timeout=4000)
public void testLogHandlerCollection() throws Exception
{
Server server = new Server();
SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(0);
server.setConnectors(new Connector[] { connector });
CaptureLog captureLog = new CaptureLog();
RequestLogHandler requestLog = new RequestLogHandler();
requestLog.setRequestLog(captureLog);
HandlerCollection handlers = new HandlerCollection();
handlers.setHandlers(new Handler[] { testHandler, requestLog });
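// The RequestLogHandler is a sibling of the test handler here rather than wrapping it, so
// logging relies on the HandlerCollection invoking every child handler for each request.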
server.setHandler(handlers);
try
{
server.start();
String host = connector.getHost();
if (host == null)
{
host = "localhost";
}
int port = connector.getLocalPort();
URI serverUri = new URI("http",null,host,port,requestPath,null,null);
// Make call to test handler
HttpURLConnection connection = (HttpURLConnection)serverUri.toURL().openConnection();
try
{
connection.setAllowUserInteraction(false);
// log response status code
int statusCode = connection.getResponseCode();
LOG.debug("Response Status Code: {}",statusCode);
if (statusCode == 200)
{
// collect response message and log it
String content = getResponseContent(connection);
LOG.debug("Response Content: {}",content);
}
}
finally
{
connection.disconnect();
}
assertRequestLog(captureLog);
}
finally
{
server.stop();
}
}
@Test(timeout=4000)
public void testLogHandlerWrapped() throws Exception
{
Server server = new Server();
SelectChannelConnector connector = new SelectChannelConnector();
connector.setPort(0);
server.setConnectors(new Connector[] { connector });
CaptureLog captureLog = new CaptureLog();
RequestLogHandler requestLog = new RequestLogHandler();
requestLog.setRequestLog(captureLog);
requestLog.setHandler(testHandler);
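// In the wrapped configuration the log handler delegates to the test handler directly, so the
// entry should be recorded as the request unwinds back through RequestLogHandler.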
server.setHandler(requestLog);
try
{
server.start();
String host = connector.getHost();
if (host == null)
{
host = "localhost";
}
int port = connector.getLocalPort();
URI serverUri = new URI("http",null,host,port,"/test",null,null);
// Make call to test handler
HttpURLConnection connection = (HttpURLConnection)serverUri.toURL().openConnection();
try
{
connection.setAllowUserInteraction(false);
// log response status code
int statusCode = connection.getResponseCode();
LOG.info("Response Status Code: {}",statusCode);
if (statusCode == 200)
{
// collect response message and log it
String content = getResponseContent(connection);
LOG.info("Response Content: {}",content);
}
}
finally
{
connection.disconnect();
}
assertRequestLog(captureLog);
}
finally
{
server.stop();
}
}
private void assertRequestLog(CaptureLog captureLog)
{
int captureCount = captureLog.captured.size();
if (captureCount != 1)
{
LOG.warn("Capture Log size is {}, expected to be 1",captureCount);
if (captureCount > 1)
{
for (int i = 0; i < captureCount; i++)
{
LOG.warn("[{}] {}",i,captureLog.captured.get(i));
}
}
assertThat("Capture Log Entry Count",captureLog.captured.size(),is(1));
}
String actual = captureLog.captured.get(0);
assertThat("Capture Log",actual,is(expectedLogEntry));
}
private String getResponseContent(HttpURLConnection connection) throws IOException
{
InputStream in = null;
InputStreamReader reader = null;
try
{
in = connection.getInputStream();
reader = new InputStreamReader(in,StringUtil.__UTF8_CHARSET);
StringWriter writer = new StringWriter();
IO.copy(reader,writer);
return writer.toString();
}
finally
{
IO.close(reader);
IO.close(in);
}
}
} | jetty-server/src/test/java/org/eclipse/jetty/server/handler/RequestLogHandlerTest.java | Adding RequestLogHandlerTest for HandlerCollection and Wrapped testing
+ 10 out of 16 test cases fail.
| jetty-server/src/test/java/org/eclipse/jetty/server/handler/RequestLogHandlerTest.java | Adding RequestLogHandlerTest for HandlerCollection and Wrapped testing | <ide><path>etty-server/src/test/java/org/eclipse/jetty/server/handler/RequestLogHandlerTest.java
<add>//
<add>// ========================================================================
<add>// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
<add>// ------------------------------------------------------------------------
<add>// All rights reserved. This program and the accompanying materials
<add>// are made available under the terms of the Eclipse Public License v1.0
<add>// and Apache License v2.0 which accompanies this distribution.
<add>//
<add>// The Eclipse Public License is available at
<add>// http://www.eclipse.org/legal/epl-v10.html
<add>//
<add>// The Apache License v2.0 is available at
<add>// http://www.opensource.org/licenses/apache2.0.php
<add>//
<add>// You may elect to redistribute this code under either of these licenses.
<add>// ========================================================================
<add>//
<add>
<add>package org.eclipse.jetty.server.handler;
<add>
<add>import static org.hamcrest.Matchers.*;
<add>import static org.junit.Assert.*;
<add>
<add>import java.io.IOException;
<add>import java.io.InputStream;
<add>import java.io.InputStreamReader;
<add>import java.io.StringWriter;
<add>import java.net.HttpURLConnection;
<add>import java.net.URI;
<add>import java.util.ArrayList;
<add>import java.util.List;
<add>
<add>import javax.servlet.ServletException;
<add>import javax.servlet.http.HttpServletRequest;
<add>import javax.servlet.http.HttpServletResponse;
<add>
<add>import org.eclipse.jetty.continuation.Continuation;
<add>import org.eclipse.jetty.continuation.ContinuationListener;
<add>import org.eclipse.jetty.continuation.Servlet3Continuation;
<add>import org.eclipse.jetty.server.Connector;
<add>import org.eclipse.jetty.server.Handler;
<add>import org.eclipse.jetty.server.Request;
<add>import org.eclipse.jetty.server.RequestLog;
<add>import org.eclipse.jetty.server.Response;
<add>import org.eclipse.jetty.server.Server;
<add>import org.eclipse.jetty.server.nio.SelectChannelConnector;
<add>import org.eclipse.jetty.toolchain.test.IO;
<add>import org.eclipse.jetty.util.StringUtil;
<add>import org.eclipse.jetty.util.component.AbstractLifeCycle;
<add>import org.eclipse.jetty.util.log.Log;
<add>import org.eclipse.jetty.util.log.Logger;
<add>import org.junit.Ignore;
<add>import org.junit.Test;
<add>import org.junit.runner.RunWith;
<add>import org.junit.runners.Parameterized;
<add>import org.junit.runners.Parameterized.Parameter;
<add>import org.junit.runners.Parameterized.Parameters;
<add>
<add>@RunWith(Parameterized.class)
<add>@Ignore
<add>public class RequestLogHandlerTest
<add>{
<add> private static final Logger LOG = Log.getLogger(RequestLogHandlerTest.class);
<add>
<add> public static class CaptureLog extends AbstractLifeCycle implements RequestLog
<add> {
<add> public List<String> captured = new ArrayList<String>();
<add>
<add> public void log(Request request, Response response)
<add> {
<add> captured.add(String.format("%s %s %s %03d",request.getMethod(),request.getUri().toString(),request.getProtocol(),response.getStatus()));
<add> }
<add> }
<add>
<add> private static abstract class AbstractTestHandler extends AbstractHandler
<add> {
<add> @Override
<add> public String toString()
<add> {
<add> return this.getClass().getSimpleName();
<add> }
<add> }
<add>
<add> private static class HelloHandler extends AbstractTestHandler
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> response.setContentType("text/plain");
<add> response.getWriter().print("Hello World");
<add> baseRequest.setHandled(true);
<add> }
<add> }
<add>
<add> private static class ResponseSendErrorHandler extends AbstractTestHandler
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> response.sendError(500, "Whoops");
<add> baseRequest.setHandled(true);
<add> }
<add> }
<add>
<add> private static class ServletExceptionHandler extends AbstractTestHandler
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> throw new ServletException("Whoops");
<add> }
<add> }
<add>
<add> private static class IOExceptionHandler extends AbstractTestHandler
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> throw new IOException("Whoops");
<add> }
<add> }
<add>
<add> private static class RuntimeExceptionHandler extends AbstractTestHandler
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> throw new RuntimeException("Whoops");
<add> }
<add> }
<add>
<add> private static class ContinuationOnTimeoutCompleteHandler extends AbstractTestHandler implements ContinuationListener
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> Continuation ac = new Servlet3Continuation(request);
<add> ac.setTimeout(1000);
<add> ac.addContinuationListener(this);
<add> baseRequest.setHandled(true);
<add> }
<add>
<add> public void onComplete(Continuation continuation)
<add> {
<add> }
<add>
<add> public void onTimeout(Continuation continuation)
<add> {
<add> continuation.complete();
<add> }
<add> }
<add>
<add> private static class ContinuationOnTimeoutCompleteUnhandledHandler extends AbstractTestHandler implements ContinuationListener
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> Continuation ac = new Servlet3Continuation(request);
<add> ac.setTimeout(1000);
<add> ac.addContinuationListener(this);
<add> }
<add>
<add> public void onComplete(Continuation continuation)
<add> {
<add> }
<add>
<add> public void onTimeout(Continuation continuation)
<add> {
<add> continuation.complete();
<add> }
<add> }
<add>
<add> private static class ContinuationOnTimeoutRuntimeExceptionHandler extends AbstractTestHandler implements ContinuationListener
<add> {
<add> public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
<add> {
<add> Continuation ac = new Servlet3Continuation(request);
<add> ac.setTimeout(1000);
<add> ac.addContinuationListener(this);
<add> baseRequest.setHandled(true);
<add> }
<add>
<add> public void onComplete(Continuation continuation)
<add> {
<add> }
<add>
<add> public void onTimeout(Continuation continuation)
<add> {
<add> throw new RuntimeException("Ooops");
<add> }
<add> }
<add>
<add> @Parameters(name="{0}")
<add> public static List<Object[]> data()
<add> {
<add> List<Object[]> data = new ArrayList<Object[]>();
<add>
<add> data.add(new Object[] { new HelloHandler(), "/test", "GET /test HTTP/1.1 200" });
<add> data.add(new Object[] { new ContinuationOnTimeoutCompleteHandler(), "/test", "GET /test HTTP/1.1 200" });
<add> data.add(new Object[] { new ContinuationOnTimeoutCompleteUnhandledHandler(), "/test", "GET /test HTTP/1.1 200" });
<add>
<add> data.add(new Object[] { new ContinuationOnTimeoutRuntimeExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
<add> data.add(new Object[] { new ResponseSendErrorHandler(), "/test", "GET /test HTTP/1.1 500" });
<add> data.add(new Object[] { new ServletExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
<add> data.add(new Object[] { new IOExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
<add> data.add(new Object[] { new RuntimeExceptionHandler(), "/test", "GET /test HTTP/1.1 500" });
<add>
<add> return data;
<add> }
<add>
<add> @Parameter(0)
<add> public Handler testHandler;
<add>
<add> @Parameter(1)
<add> public String requestPath;
<add>
<add> @Parameter(2)
<add> public String expectedLogEntry;
<add>
<add> @Test(timeout=4000)
<add> public void testLogHandlerCollection() throws Exception
<add> {
<add> Server server = new Server();
<add>
<add> SelectChannelConnector connector = new SelectChannelConnector();
<add> connector.setPort(0);
<add> server.setConnectors(new Connector[] { connector });
<add>
<add> CaptureLog captureLog = new CaptureLog();
<add>
<add> RequestLogHandler requestLog = new RequestLogHandler();
<add> requestLog.setRequestLog(captureLog);
<add>
<add> HandlerCollection handlers = new HandlerCollection();
<add> handlers.setHandlers(new Handler[] { testHandler, requestLog });
<add> server.setHandler(handlers);
<add>
<add> try
<add> {
<add> server.start();
<add>
<add> String host = connector.getHost();
<add> if (host == null)
<add> {
<add> host = "localhost";
<add> }
<add> int port = connector.getLocalPort();
<add>
<add> URI serverUri = new URI("http",null,host,port,requestPath,null,null);
<add>
<add> // Make call to test handler
<add> HttpURLConnection connection = (HttpURLConnection)serverUri.toURL().openConnection();
<add> try
<add> {
<add> connection.setAllowUserInteraction(false);
<add>
<add> // log response status code
<add> int statusCode = connection.getResponseCode();
<add> LOG.debug("Response Status Code: {}",statusCode);
<add>
<add> if (statusCode == 200)
<add> {
<add> // collect response message and log it
<add> String content = getResponseContent(connection);
<add> LOG.debug("Response Content: {}",content);
<add> }
<add> }
<add> finally
<add> {
<add> connection.disconnect();
<add> }
<add>
<add> assertRequestLog(captureLog);
<add> }
<add> finally
<add> {
<add> server.stop();
<add> }
<add> }
<add>
<add> @Test(timeout=4000)
<add> public void testLogHandlerWrapped() throws Exception
<add> {
<add> Server server = new Server();
<add> SelectChannelConnector connector = new SelectChannelConnector();
<add> connector.setPort(0);
<add> server.setConnectors(new Connector[] { connector });
<add>
<add> CaptureLog captureLog = new CaptureLog();
<add>
<add> RequestLogHandler requestLog = new RequestLogHandler();
<add> requestLog.setRequestLog(captureLog);
<add>
<add> requestLog.setHandler(testHandler);
<add>
<add> server.setHandler(requestLog);
<add>
<add> try
<add> {
<add> server.start();
<add>
<add> String host = connector.getHost();
<add> if (host == null)
<add> {
<add> host = "localhost";
<add> }
<add> int port = connector.getLocalPort();
<add>
<add> URI serverUri = new URI("http",null,host,port,"/test",null,null);
<add>
<add> // Make call to test handler
<add> HttpURLConnection connection = (HttpURLConnection)serverUri.toURL().openConnection();
<add> try
<add> {
<add> connection.setAllowUserInteraction(false);
<add>
<add> // log response status code
<add> int statusCode = connection.getResponseCode();
<add> LOG.info("Response Status Code: {}",statusCode);
<add>
<add> if (statusCode == 200)
<add> {
<add> // collect response message and log it
<add> String content = getResponseContent(connection);
<add> LOG.info("Response Content: {}",content);
<add> }
<add> }
<add> finally
<add> {
<add> connection.disconnect();
<add> }
<add>
<add> assertRequestLog(captureLog);
<add> }
<add> finally
<add> {
<add> server.stop();
<add> }
<add> }
<add>
<add> private void assertRequestLog(CaptureLog captureLog)
<add> {
<add> int captureCount = captureLog.captured.size();
<add>
<add> if (captureCount != 1)
<add> {
<add> LOG.warn("Capture Log size is {}, expected to be 1",captureCount);
<add> if (captureCount > 1)
<add> {
<add> for (int i = 0; i < captureCount; i++)
<add> {
<add> LOG.warn("[{}] {}",i,captureLog.captured.get(i));
<add> }
<add> }
<add> assertThat("Capture Log Entry Count",captureLog.captured.size(),is(1));
<add> }
<add>
<add> String actual = captureLog.captured.get(0);
<add> assertThat("Capture Log",actual,is(expectedLogEntry));
<add> }
<add>
<add> private String getResponseContent(HttpURLConnection connection) throws IOException
<add> {
<add> InputStream in = null;
<add> InputStreamReader reader = null;
<add>
<add> try
<add> {
<add> in = connection.getInputStream();
<add> reader = new InputStreamReader(in,StringUtil.__UTF8_CHARSET);
<add> StringWriter writer = new StringWriter();
<add> IO.copy(reader,writer);
<add> return writer.toString();
<add> }
<add> finally
<add> {
<add> IO.close(reader);
<add> IO.close(in);
<add> }
<add> }
<add>} |
|
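/*
 * A minimal sketch of the two RequestLogHandler wirings exercised by the test above,
 * assuming the same Jetty 8-era API the test uses (SelectChannelConnector,
 * HandlerCollection). NCSARequestLog and the log file name are illustrative
 * stand-ins for the CaptureLog used in the test; this is not part of the test file.
 */
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.NCSARequestLog;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.RequestLogHandler;
import org.eclipse.jetty.server.nio.SelectChannelConnector;

public class RequestLogWiringSketch
{
    /* Sibling style: the RequestLogHandler sits next to the application handler in a
     * HandlerCollection, so a log entry is attempted for every request that reaches
     * the collection, even when the application handler errors out. */
    public static Server collectionStyle(Handler app)
    {
        Server server = new Server();
        SelectChannelConnector connector = new SelectChannelConnector();
        connector.setPort(0);
        server.addConnector(connector);

        RequestLogHandler requestLog = new RequestLogHandler();
        requestLog.setRequestLog(new NCSARequestLog("request.log"));

        HandlerCollection handlers = new HandlerCollection();
        handlers.setHandlers(new Handler[] { app, requestLog });
        server.setHandler(handlers);
        return server;
    }

    /* Wrapped style: the RequestLogHandler wraps the application handler and writes
     * its entry as the request unwinds back through the wrapper. */
    public static Server wrappedStyle(Handler app)
    {
        Server server = new Server();
        SelectChannelConnector connector = new SelectChannelConnector();
        connector.setPort(0);
        server.addConnector(connector);

        RequestLogHandler requestLog = new RequestLogHandler();
        requestLog.setRequestLog(new NCSARequestLog("request.log"));
        requestLog.setHandler(app);
        server.setHandler(requestLog);
        return server;
    }
}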
Java | mit | abe54ab1b10185db8b4775f8fbadc4d69e1684a8 | 0 | robertmathew/PopularMoviesApp | package com.robo.popularmoviesapp.fragments;
import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.robo.popularmoviesapp.Movie;
import com.robo.popularmoviesapp.R;
import com.robo.popularmoviesapp.Review;
import com.robo.popularmoviesapp.Utility;
import com.robo.popularmoviesapp.activities.ReviewActivity;
import com.robo.popularmoviesapp.adapters.MovieTrailerAdapter;
import com.robo.popularmoviesapp.asynctask.FetchMovieInfoTask;
import com.robo.popularmoviesapp.asynctask.FetchMovieReviewTask;
import com.robo.popularmoviesapp.asynctask.FetchMovieVideoTask;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
public class DetailFragment extends Fragment {
//Poster and backdrop URL
private final static String POSTER_BASE_URL = "http://image.tmdb.org/t/p/";
private final String LOG_TAG = DetailFragment.class.getSimpleName();
private final String POSTER_SIZE_PATH = "w342";
private final String BACKDROP_SIZE_PATH = "w780";
ImageView imgPoster, imgBackdrop;
TextView tvRating, tvRelease, tvPlot;
TextView tvUsername, tvContent;
Button btnMoreReview;
LinearLayout reviewLayout;
MovieTrailerAdapter movieTrailerAdapter;
LinearLayout trailerLayout;
private String id, title;
private String mBackdrop, mPoster, mRating, mReleaseDate, mPlot;
private ArrayList<Movie> trailerList = new ArrayList<>();
private ArrayList<Review> reviewList = new ArrayList<>();
public DetailFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
id = getActivity().getIntent().getStringExtra("id");
title = getActivity().getIntent().getStringExtra("title");
if (savedInstanceState != null) {
id = savedInstanceState.getString(getString(R.string.key_id));
title = savedInstanceState.getString(getString(R.string.key_title));
mBackdrop = savedInstanceState.getString(getString(R.string.key_backdrop));
mPoster = savedInstanceState.getString(getString(R.string.key_poster));
mRating = savedInstanceState.getString(getString(R.string.key_rating));
mReleaseDate = savedInstanceState.getString(getString(R.string.key_release_date));
mPlot = savedInstanceState.getString(getString(R.string.key_plot));
trailerList = savedInstanceState.getParcelableArrayList(getString(R.string.key_trailer));
reviewList = savedInstanceState.getParcelableArrayList(getString(R.string.key_review));
} else {
//AsyncTask to load the movie info
MovieInfoTask task = new MovieInfoTask();
task.execute(id);
//AsyncTask to load trailer
MovieTrailerTask tTask = new MovieTrailerTask();
tTask.execute(id);
//Asynctask to load review
MovieReviewTask reviewTask = new MovieReviewTask();
reviewTask.execute(id);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View view = inflater.inflate(R.layout.fragment_detail, container, false);
final Toolbar toolbar = (Toolbar) view.findViewById(R.id.toolbar);
((AppCompatActivity) getActivity()).setSupportActionBar(toolbar);
((AppCompatActivity) getActivity()).getSupportActionBar().setDisplayHomeAsUpEnabled(true);
CollapsingToolbarLayout collapsingToolbar =
(CollapsingToolbarLayout) view.findViewById(R.id.collapsing_toolbar);
collapsingToolbar.setTitle(title);
imgBackdrop = (ImageView) view.findViewById(R.id.backdrop);
imgPoster = (ImageView) view.findViewById(R.id.posterImage);
tvRating = (TextView) view.findViewById(R.id.tvRating);
tvRelease = (TextView) view.findViewById(R.id.tvRelease);
tvPlot = (TextView) view.findViewById(R.id.tvPlot);
reviewLayout = (LinearLayout) view.findViewById(R.id.linearUserReview);
tvUsername = (TextView) view.findViewById(R.id.review_author_name_view);
tvContent = (TextView) view.findViewById(R.id.review_content_view);
btnMoreReview = (Button) view.findViewById(R.id.detail_reviews_show_more_button);
btnMoreReview.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(getActivity(), ReviewActivity.class);
intent.putParcelableArrayListExtra("reviews", reviewList);
startActivity(intent);
}
});
//Trailer
trailerLayout = (LinearLayout) view.findViewById(R.id.linearTrailer);
RecyclerView trailerRecyclerView = (RecyclerView) view.findViewById(R.id.recycler_view_trailer);
trailerRecyclerView.setHasFixedSize(true);
movieTrailerAdapter = new MovieTrailerAdapter(getActivity(), trailerList);
LinearLayoutManager linearLayoutManager =
new LinearLayoutManager(getActivity(), LinearLayoutManager.HORIZONTAL, false);
trailerRecyclerView.setLayoutManager(linearLayoutManager);
trailerRecyclerView.setAdapter(movieTrailerAdapter);
if (savedInstanceState != null) {
setMovieInfo(mBackdrop, mPoster, mRating, mReleaseDate, mPlot);
setMovieReview(reviewList);
}
return view;
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putString(getString(R.string.key_id), id);
outState.putString(getString(R.string.key_title), title);
outState.putString(getString(R.string.key_backdrop), mBackdrop);
outState.putString(getString(R.string.key_poster), mPoster);
outState.putString(getString(R.string.key_rating), mRating);
outState.putString(getString(R.string.key_release_date), mReleaseDate);
outState.putString(getString(R.string.key_plot), mPlot);
outState.putParcelableArrayList(getString(R.string.key_trailer), trailerList);
outState.putParcelableArrayList(getString(R.string.key_review), reviewList);
super.onSaveInstanceState(outState);
}
public void setMovieInfo(String backdrop, String posterPath, String rating,
String releaseDate, String plot) {
//Backdrop
Picasso.with(imgBackdrop.getContext())
.load(POSTER_BASE_URL + BACKDROP_SIZE_PATH + backdrop)
.into(imgBackdrop);
//Poster
Picasso.with(imgPoster.getContext())
.load(POSTER_BASE_URL + POSTER_SIZE_PATH + posterPath)
.priority(Picasso.Priority.HIGH).into(imgPoster);
//Rating
String ratingValue = getResources()
.getString(R.string.rating_value, rating);
tvRating.setText(ratingValue);
//Release date
tvRelease.setText(Utility.loadDate(releaseDate));
//Plot
tvPlot.setText(plot);
}
public void setMovieReview(ArrayList<Review> reviews) {
if (reviews.size() != 0) {
Review r = reviews.get(0);
tvUsername.setText(r.getAuthor());
tvContent.setText(r.getContent());
} else {
reviewLayout.setVisibility(View.GONE);
}
}
private class MovieInfoTask extends FetchMovieInfoTask {
@Override
protected void onPostExecute(Movie movieInfo) {
if (movieInfo != null) {
mBackdrop = movieInfo.getBackdrop();
mPoster = movieInfo.getImg();
mRating = movieInfo.getRating();
mReleaseDate = movieInfo.getReleaseDate();
mPlot = movieInfo.getPlot();
setMovieInfo(mBackdrop, mPoster, mRating, mReleaseDate, mPlot);
}
}
}
private class MovieTrailerTask extends FetchMovieVideoTask {
@Override
protected void onPostExecute(ArrayList<Movie> movies) {
if (movies != null) {
trailerList.clear();
for (Movie m : movies) {
trailerList.add(m);
}
if (trailerList.size() != 0) {
movieTrailerAdapter.notifyDataSetChanged();
} else {
trailerLayout.setVisibility(View.GONE);
}
}
}
}
private class MovieReviewTask extends FetchMovieReviewTask {
@Override
protected void onPostExecute(ArrayList<Review> reviews) {
if (reviews != null) {
reviewList.clear();
for (Review review : reviews) {
reviewList.add(review);
}
setMovieReview(reviewList);
}
}
}
}
| app/src/main/java/com/robo/popularmoviesapp/fragments/DetailFragment.java | package com.robo.popularmoviesapp.fragments;
import android.app.Fragment;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.robo.popularmoviesapp.Movie;
import com.robo.popularmoviesapp.R;
import com.robo.popularmoviesapp.Review;
import com.robo.popularmoviesapp.Utility;
import com.robo.popularmoviesapp.activities.ReviewActivity;
import com.robo.popularmoviesapp.adapters.MovieTrailerAdapter;
import com.robo.popularmoviesapp.asynctask.FetchMovieInfoTask;
import com.robo.popularmoviesapp.asynctask.FetchMovieReviewTask;
import com.robo.popularmoviesapp.asynctask.FetchMovieVideoTask;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
public class DetailFragment extends Fragment {
//Poster and backdrop URL
private final static String POSTER_BASE_URL = "http://image.tmdb.org/t/p/";
private final String LOG_TAG = DetailFragment.class.getSimpleName();
private final String POSTER_SIZE_PATH = "w342";
private final String BACKDROP_SIZE_PATH = "w780";
ImageView imgPoster, imgBackdrop;
TextView tvRating, tvRelease, tvPlot;
TextView tvUsername, tvContent;
Button btnMoreReview;
MovieTrailerAdapter movieTrailerAdapter;
private String id, title;
private String mBackdrop, mPoster, mRating, mReleaseDate, mPlot;
private ArrayList<Movie> trailerList = new ArrayList<>();
private ArrayList<Review> reviewList = new ArrayList<>();
public DetailFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
id = getActivity().getIntent().getStringExtra("id");
title = getActivity().getIntent().getStringExtra("title");
if (savedInstanceState != null) {
id = savedInstanceState.getString(getString(R.string.key_id));
title = savedInstanceState.getString(getString(R.string.key_title));
mBackdrop = savedInstanceState.getString(getString(R.string.key_backdrop));
mPoster = savedInstanceState.getString(getString(R.string.key_poster));
mRating = savedInstanceState.getString(getString(R.string.key_rating));
mReleaseDate = savedInstanceState.getString(getString(R.string.key_release_date));
mPlot = savedInstanceState.getString(getString(R.string.key_plot));
trailerList = savedInstanceState.getParcelableArrayList(getString(R.string.key_trailer));
reviewList = savedInstanceState.getParcelableArrayList(getString(R.string.key_review));
} else {
//AsyncTask to load the movie info
MovieInfoTask task = new MovieInfoTask();
task.execute(id);
//AsyncTask to load trailer
MovieTrailerTask tTask = new MovieTrailerTask();
tTask.execute(id);
//Asynctask to load review
MovieReviewTask reviewTask = new MovieReviewTask();
reviewTask.execute(id);
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View view = inflater.inflate(R.layout.fragment_detail, container, false);
final Toolbar toolbar = (Toolbar) view.findViewById(R.id.toolbar);
((AppCompatActivity) getActivity()).setSupportActionBar(toolbar);
((AppCompatActivity) getActivity()).getSupportActionBar().setDisplayHomeAsUpEnabled(true);
CollapsingToolbarLayout collapsingToolbar =
(CollapsingToolbarLayout) view.findViewById(R.id.collapsing_toolbar);
collapsingToolbar.setTitle(title);
imgBackdrop = (ImageView) view.findViewById(R.id.backdrop);
imgPoster = (ImageView) view.findViewById(R.id.posterImage);
tvRating = (TextView) view.findViewById(R.id.tvRating);
tvRelease = (TextView) view.findViewById(R.id.tvRelease);
tvPlot = (TextView) view.findViewById(R.id.tvPlot);
tvUsername = (TextView) view.findViewById(R.id.review_author_name_view);
tvContent = (TextView) view.findViewById(R.id.review_content_view);
btnMoreReview = (Button) view.findViewById(R.id.detail_reviews_show_more_button);
btnMoreReview.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(getActivity(), ReviewActivity.class);
intent.putParcelableArrayListExtra("reviews", reviewList);
startActivity(intent);
}
});
//Trailer
RecyclerView trailerRecyclerView = (RecyclerView) view.findViewById(R.id.recycler_view_trailer);
trailerRecyclerView.setHasFixedSize(true);
movieTrailerAdapter = new MovieTrailerAdapter(getActivity(), trailerList);
LinearLayoutManager linearLayoutManager =
new LinearLayoutManager(getActivity(), LinearLayoutManager.HORIZONTAL, false);
trailerRecyclerView.setLayoutManager(linearLayoutManager);
trailerRecyclerView.setAdapter(movieTrailerAdapter);
if (savedInstanceState != null) {
setMovieInfo(mBackdrop, mPoster, mRating, mReleaseDate, mPlot);
setMovieReview(reviewList);
}
return view;
}
@Override
public void onSaveInstanceState(Bundle outState) {
outState.putString(getString(R.string.key_id), id);
outState.putString(getString(R.string.key_title), title);
outState.putString(getString(R.string.key_backdrop), mBackdrop);
outState.putString(getString(R.string.key_poster), mPoster);
outState.putString(getString(R.string.key_rating), mRating);
outState.putString(getString(R.string.key_release_date), mReleaseDate);
outState.putString(getString(R.string.key_plot), mPlot);
outState.putParcelableArrayList(getString(R.string.key_trailer), trailerList);
outState.putParcelableArrayList(getString(R.string.key_review), reviewList);
super.onSaveInstanceState(outState);
}
public void setMovieInfo(String backdrop, String posterPath, String rating,
String releaseDate, String plot) {
//Backdrop
Picasso.with(imgBackdrop.getContext())
.load(POSTER_BASE_URL + BACKDROP_SIZE_PATH + backdrop)
.into(imgBackdrop);
//Poster
Picasso.with(imgPoster.getContext())
.load(POSTER_BASE_URL + POSTER_SIZE_PATH + posterPath)
.priority(Picasso.Priority.HIGH).into(imgPoster);
//Rating
String ratingValue = getResources()
.getString(R.string.rating_value, rating);
tvRating.setText(ratingValue);
//Release date
tvRelease.setText(Utility.loadDate(releaseDate));
//Plot
tvPlot.setText(plot);
}
public void setMovieReview(ArrayList<Review> reviews) {
Review r = reviews.get(0);
tvUsername.setText(r.getAuthor());
tvContent.setText(r.getContent());
}
private class MovieInfoTask extends FetchMovieInfoTask {
@Override
protected void onPostExecute(Movie movieInfo) {
if (movieInfo != null) {
mBackdrop = movieInfo.getBackdrop();
mPoster = movieInfo.getImg();
mRating = movieInfo.getRating();
mReleaseDate = movieInfo.getReleaseDate();
mPlot = movieInfo.getPlot();
setMovieInfo(mBackdrop, mPoster, mRating, mReleaseDate, mPlot);
}
}
}
private class MovieTrailerTask extends FetchMovieVideoTask {
@Override
protected void onPostExecute(ArrayList<Movie> movies) {
if (movies != null) {
trailerList.clear();
for (Movie m : movies) {
trailerList.add(m);
}
movieTrailerAdapter.notifyDataSetChanged();
}
}
}
private class MovieReviewTask extends FetchMovieReviewTask {
@Override
protected void onPostExecute(ArrayList<Review> reviews) {
if (reviews != null) {
reviewList.clear();
for (Review review : reviews) {
reviewList.add(review);
}
setMovieReview(reviewList);
}
}
}
}
| Bug fix for empty trailer and review
| app/src/main/java/com/robo/popularmoviesapp/fragments/DetailFragment.java | Bug fix for empty trailer and review | <ide><path>pp/src/main/java/com/robo/popularmoviesapp/fragments/DetailFragment.java
<ide> import android.view.ViewGroup;
<ide> import android.widget.Button;
<ide> import android.widget.ImageView;
<add>import android.widget.LinearLayout;
<ide> import android.widget.TextView;
<ide>
<ide> import com.robo.popularmoviesapp.Movie;
<ide> private final String BACKDROP_SIZE_PATH = "w780";
<ide> ImageView imgPoster, imgBackdrop;
<ide> TextView tvRating, tvRelease, tvPlot;
<add>
<ide> TextView tvUsername, tvContent;
<ide> Button btnMoreReview;
<add> LinearLayout reviewLayout;
<ide> MovieTrailerAdapter movieTrailerAdapter;
<add> LinearLayout trailerLayout;
<add>
<ide> private String id, title;
<ide> private String mBackdrop, mPoster, mRating, mReleaseDate, mPlot;
<ide> private ArrayList<Movie> trailerList = new ArrayList<>();
<ide> tvRelease = (TextView) view.findViewById(R.id.tvRelease);
<ide> tvPlot = (TextView) view.findViewById(R.id.tvPlot);
<ide>
<add> reviewLayout = (LinearLayout) view.findViewById(R.id.linearUserReview);
<ide> tvUsername = (TextView) view.findViewById(R.id.review_author_name_view);
<ide> tvContent = (TextView) view.findViewById(R.id.review_content_view);
<ide> btnMoreReview = (Button) view.findViewById(R.id.detail_reviews_show_more_button);
<ide> });
<ide>
<ide> //Trailer
<add> trailerLayout = (LinearLayout) view.findViewById(R.id.linearTrailer);
<ide> RecyclerView trailerRecyclerView = (RecyclerView) view.findViewById(R.id.recycler_view_trailer);
<ide> trailerRecyclerView.setHasFixedSize(true);
<ide> movieTrailerAdapter = new MovieTrailerAdapter(getActivity(), trailerList);
<ide> }
<ide>
<ide> public void setMovieReview(ArrayList<Review> reviews) {
<del> Review r = reviews.get(0);
<del> tvUsername.setText(r.getAuthor());
<del> tvContent.setText(r.getContent());
<add> if (reviews.size() != 0) {
<add> Review r = reviews.get(0);
<add> tvUsername.setText(r.getAuthor());
<add> tvContent.setText(r.getContent());
<add> } else {
<add> reviewLayout.setVisibility(View.GONE);
<add> }
<ide> }
<ide>
<ide> private class MovieInfoTask extends FetchMovieInfoTask {
<ide> for (Movie m : movies) {
<ide> trailerList.add(m);
<ide> }
<del> movieTrailerAdapter.notifyDataSetChanged();
<add> if (trailerList.size() != 0) {
<add> movieTrailerAdapter.notifyDataSetChanged();
<add> } else {
<add> trailerLayout.setVisibility(View.GONE);
<add> }
<ide> }
<ide> }
<ide> } |
|
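/*
 * The fix above reduces to one pattern: bind the first element only when the list is
 * non-empty, otherwise hide the owning section instead of showing an empty card.
 * A standalone sketch of that guard; the helper class and method names are
 * illustrative and not part of the app.
 */
import android.view.View;

import java.util.List;

final class EmptySectionGuard {
    private EmptySectionGuard() {
    }

    /** Shows the section when there is content, hides it otherwise; returns true when content exists. */
    static boolean showOrHide(List<?> items, View section) {
        boolean hasContent = items != null && !items.isEmpty();
        section.setVisibility(hasContent ? View.VISIBLE : View.GONE);
        return hasContent;
    }
}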
Java | apache-2.0 | 5a9f5e9ce181731f16308cfcb38742d68fc396e7 | 0 | java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity,java110/MicroCommunity | package com.java110.utils.constant;
/**
* Privilege code constants class
*/
public final class PrivilegeCodeConstant {
private PrivilegeCodeConstant() {
}
//Enter community
public static final String PRIVILEGE_ENTER_COMMUNITY = "500201904008";
//Initialize building
public static final String PRIVILEGE_FLOOR = "500201904011";
//Initialize community building unit
public static final String PRIVILEGE_UNIT = "500201904012";
//Initialize house (room) unit
public static final String PRIVILEGE_ROOM = "500201904006";
//House sale permission
public static final String PRIVILEGE_SELL_ROOM = "500201904014";
//House sale permission
public static final String PRIVILEGE_OWNER_ROOM = "500201904015";
//Property fee configuration permission
public static final String PRIVILEGE_PROPERTY_CONFIG_FEE = "500201904016";
//Property fee permission
public static final String PRIVILEGE_PROPERTY_FEE = "500201904004";
//Property parking space permission
public static final String PRIVILEGE_PARKING_SPACE = "500201906017";
//Owner parking space
public static final String PRIVILEGE_PARKING_SPACE_FOR_OWNER = "500201906020";
//Parking space sale
public static final String PRIVILEGE_CAR = "500201906019";
//Parking fee configuration permission
public static final String PRIVILEGE_PARKING_SPACE_CONFIG_FEE = "500201904021";
// demo privilege code
public static final String PRIVILEGE_DEMO = "500201906023";
public static final String AGENT_HAS_LIST_COMMUNITY = "500201906025";
public static final String ADMIN_HAS_LIST_COMMUNITY = "502019091797230001";
public static final String AGENT_HAS_LIST_AUDIT_ENTER_COMMUNITY = "502019091978690002";
public static final String HAS_LIST_NOTICE = "500201904009";
public static final String AGENT_HAS_LIST_APP = "500201906026";
public static final String AGENT_HAS_LIST_SERVICE = "500201906027";
public static final String AGENT_HAS_LIST_MAPPING = "500201906029";
public static final String AGENT_HAS_LIST_SERVICEREGISTER = "500201907032";
public static final String HAS_LIST_CACHE = "500201907032";
public static final String LIST_SERVICEIMPL = "500201906028";
public static final String MENU = "500201908035";
public static final String MENU_MANAGE = "502019082521880003";
public static final String BASE_PRIVILEGE = "500201908036";
/**
* Visitor-related permission constants
*/
public static final String AGENT_HAS_LIST_VISIT = "500201907033";
/**
* Service provision privilege code
*/
public static final String SERVICE_PROVIDE = "502019090149430005";
public static final String ADD_OWNER_ROOM = "502019091322800001";
//Audit privilege code
public static final String AUDIT_COMMUNITY = "502019091773700003";
public static final String AUDIT_ENTER_COMMUNITY = "502019091978690002";
public static final String GET_FILE = "502019100245350003";
public static final String SAVE_FILE = "502019100245350003";
public static final String LIST_OWNERREPAIR = "502019100524950005";
public static final String LIST_ORG = "502019100721900005";
public static final String ADD_STAFF = "";
public static final String REPAIRDISPATCHSTEP = "502019101946430010";
public static final String CLOSE_REPAIR_DISPATCH = "502019102010870005";
//Resource goods
public static final String AGENT_HAS_LIST_RESOURCESTORE = "502019102151130004";
public static final String ITEMOUT = "5020191023000001";
//Audit personnel (reviewers)
public static final String AGENT_HAS_LIST_AUDITUSER = "502019102639930001";
//Audit work order
public static final String AGENT_HAS_LIST_AUDITORDER = "502019102911270009";
public static final String LIST_PAY_FEE = "502019103056060006";
//Complaints and suggestions privilege code
public static final String AGENT_HAS_LIST_COMPLAINT = "502019103114270018";
//Staff fee collection
public static final String LIST_STAFF_FEE = "502019103108450011";
//Audit work order
public static final String AGENT_HAS_LIST_AUDIT_COMPLAINT = "502019110360380006";
public static final String AGENT_HAS_LIST_AUDIT_HISTORY_COMPLAINT = "502019110534240006";
//Device information
public static final String AGENT_HAS_LIST_MACHINE = "502019110809250044";
public static final String AGENT_HAS_LIST_MACHINE_TRANSLATE = "502019111025350052";
public static final String AGENT_HAS_LIST_MACHINE_RECORD = "502019112673230007";
public static final String LIST_APPLICATION_KEY = "502019120169500005";
//Inspection point privilege code
public static final String AGENT_HAS_LIST_INSPECTION_POINT = "502020021024790007";
//Inspection plan privilege code
public static final String AGENT_HAS_LIST_INSPECTION_PLAN = "502020021268740005";
}
| java110-utils/src/main/java/com/java110/utils/constant/PrivilegeCodeConstant.java | package com.java110.utils.constant;
/**
* Privilege code constants class
*/
public final class PrivilegeCodeConstant {
private PrivilegeCodeConstant() {
}
//Enter community
public static final String PRIVILEGE_ENTER_COMMUNITY = "500201904008";
//Initialize building
public static final String PRIVILEGE_FLOOR = "500201904011";
//Initialize community building unit
public static final String PRIVILEGE_UNIT = "500201904012";
//Initialize house (room) unit
public static final String PRIVILEGE_ROOM = "500201904006";
//House sale permission
public static final String PRIVILEGE_SELL_ROOM = "500201904014";
//House sale permission
public static final String PRIVILEGE_OWNER_ROOM = "500201904015";
//Property fee configuration permission
public static final String PRIVILEGE_PROPERTY_CONFIG_FEE = "500201904016";
//Property fee permission
public static final String PRIVILEGE_PROPERTY_FEE = "500201904004";
//Property parking space permission
public static final String PRIVILEGE_PARKING_SPACE = "500201906017";
//Owner parking space
public static final String PRIVILEGE_PARKING_SPACE_FOR_OWNER = "500201906020";
//Parking space sale
public static final String PRIVILEGE_CAR = "500201906019";
//Parking fee configuration permission
public static final String PRIVILEGE_PARKING_SPACE_CONFIG_FEE = "500201904021";
// demo privilege code
public static final String PRIVILEGE_DEMO = "500201906023";
public static final String AGENT_HAS_LIST_COMMUNITY = "500201906025";
public static final String ADMIN_HAS_LIST_COMMUNITY = "502019091797230001";
public static final String AGENT_HAS_LIST_AUDIT_ENTER_COMMUNITY = "502019091978690002";
public static final String HAS_LIST_NOTICE = "500201904009";
public static final String AGENT_HAS_LIST_APP = "500201906026";
public static final String AGENT_HAS_LIST_SERVICE = "500201906027";
public static final String AGENT_HAS_LIST_MAPPING = "500201906029";
public static final String AGENT_HAS_LIST_SERVICEREGISTER = "500201907032";
public static final String HAS_LIST_CACHE = "500201907032";
public static final String LIST_SERVICEIMPL = "500201906028";
public static final String MENU = "500201908035";
public static final String MENU_MANAGE = "502019082521880003";
public static final String BASE_PRIVILEGE = "500201908036";
/**
* Visitor-related permission constants
*/
public static final String AGENT_HAS_LIST_VISIT = "500201907033";
/**
* Service provision privilege code
*/
public static final String SERVICE_PROVIDE = "502019090149430005";
public static final String ADD_OWNER_ROOM = "502019091322800001";
//Audit privilege code
public static final String AUDIT_COMMUNITY = "502019091773700003";
public static final String AUDIT_ENTER_COMMUNITY = "502019091978690002";
public static final String GET_FILE = "502019100245350003";
public static final String SAVE_FILE = "502019100245350003";
public static final String LIST_OWNERREPAIR = "502019100524950005";
public static final String LIST_ORG = "502019100721900005";
public static final String ADD_STAFF = "";
public static final String REPAIRDISPATCHSTEP = "502019101946430010";
public static final String CLOSE_REPAIR_DISPATCH = "502019102010870005";
//Resource goods
public static final String AGENT_HAS_LIST_RESOURCESTORE = "502019102151130004";
public static final String ITEMOUT = "5020191023000001";
//Audit personnel (reviewers)
public static final String AGENT_HAS_LIST_AUDITUSER = "502019102639930001";
//Audit work order
public static final String AGENT_HAS_LIST_AUDITORDER = "502019102911270009";
public static final String LIST_PAY_FEE = "502019103056060006";
//Complaints and suggestions privilege code
public static final String AGENT_HAS_LIST_COMPLAINT = "502019103114270018";
//Staff fee collection
public static final String LIST_STAFF_FEE = "502019103108450011";
//Audit work order
public static final String AGENT_HAS_LIST_AUDIT_COMPLAINT = "502019110360380006";
public static final String AGENT_HAS_LIST_AUDIT_HISTORY_COMPLAINT = "502019110534240006";
//Device information
public static final String AGENT_HAS_LIST_MACHINE = "502019110809250044";
public static final String AGENT_HAS_LIST_MACHINE_TRANSLATE = "502019111025350052";
public static final String AGENT_HAS_LIST_MACHINE_RECORD = "502019112673230007";
public static final String LIST_APPLICATION_KEY = "502019120169500005";
//Inspection point privilege code
public static final String AGENT_HAS_LIST_INSPECTION_POINT = "502020021024790007";
}
| 更新PrivilegeCodeConstant
 | java110-utils/src/main/java/com/java110/utils/constant/PrivilegeCodeConstant.java | Update PrivilegeCodeConstant | <ide><path>ava110-utils/src/main/java/com/java110/utils/constant/PrivilegeCodeConstant.java
<ide>
<ide> //Inspection point privilege code
<ide> public static final String AGENT_HAS_LIST_INSPECTION_POINT = "502020021024790007";
<add> //Inspection plan privilege code
<add> public static final String AGENT_HAS_LIST_INSPECTION_PLAN = "502020021268740005";
<add>
<ide>
<ide>
<ide> |
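/*
 * Hypothetical usage only: privilege codes like the one added in this commit are
 * normally compared against the set of codes granted to the current user. The
 * checker class and method below are illustrative assumptions for that pattern,
 * not MicroCommunity API.
 */
import com.java110.utils.constant.PrivilegeCodeConstant;

import java.util.Set;

final class PrivilegeCheckSketch {
    private final Set<String> grantedPrivilegeIds;

    PrivilegeCheckSketch(Set<String> grantedPrivilegeIds) {
        this.grantedPrivilegeIds = grantedPrivilegeIds;
    }

    /** True when the caller holds the inspection-plan listing privilege added above. */
    boolean canListInspectionPlans() {
        return grantedPrivilegeIds.contains(PrivilegeCodeConstant.AGENT_HAS_LIST_INSPECTION_PLAN);
    }
}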
|
Java | agpl-3.0 | 5f83c1c7afa13d32e021ac328bcd1988008070d6 | 0 | David-Development/ownCloud-Account-Importer | package com.nextcloud.android.sso.api;
import android.content.Context;
import android.os.Looper;
import android.util.Log;
import com.nextcloud.android.sso.aidl.NextcloudRequest;
import com.nextcloud.android.sso.helper.ExponentialBackoff;
import com.nextcloud.android.sso.model.SingleSignOnAccount;
import java.io.InputStream;
public abstract class NetworkRequest {
private static final String TAG = NetworkRequest.class.getCanonicalName();
private SingleSignOnAccount mAccount;
protected Context mContext;
protected NextcloudAPI.ApiConnectedListener mCallback;
protected boolean mDestroyed = false; // Flag indicating if API is destroyed
protected NetworkRequest(Context context, SingleSignOnAccount account, NextcloudAPI.ApiConnectedListener callback) {
this.mContext = context;
this.mAccount = account;
this.mCallback = callback;
}
protected void connect(String type) {
Log.v(TAG, "Nextcloud Single sign-on connect() called [" + Thread.currentThread().getName() + "] Account-Type: [" + type + "]");
if (mDestroyed) {
throw new IllegalStateException("API already destroyed! You cannot reuse a stopped API instance");
}
}
protected abstract InputStream performNetworkRequest(NextcloudRequest request, InputStream requestBodyInputStream) throws Exception;
protected void connectApiWithBackoff() {
new ExponentialBackoff(1000, 10000, 2, 5, Looper.getMainLooper(), () -> {
connect(mAccount.type);
}).start();
}
protected void stop() {
mCallback = null;
mAccount = null;
mDestroyed = true;
}
protected String getAccountName() {
return mAccount.name;
}
protected String getAccountToken() {
return mAccount.token;
}
}
| src/main/java/com/nextcloud/android/sso/api/NetworkRequest.java | package com.nextcloud.android.sso.api;
import android.content.Context;
import android.os.Looper;
import android.util.Log;
import com.nextcloud.android.sso.aidl.NextcloudRequest;
import com.nextcloud.android.sso.helper.ExponentialBackoff;
import com.nextcloud.android.sso.model.SingleSignOnAccount;
import java.io.InputStream;
public abstract class NetworkRequest {
private static final String TAG = NetworkRequest.class.getCanonicalName();
private SingleSignOnAccount mAccount;
protected Context mContext;
protected NextcloudAPI.ApiConnectedListener mCallback;
protected boolean mDestroyed = false; // Flag indicating if API is destroyed
protected NetworkRequest(Context context, SingleSignOnAccount account, NextcloudAPI.ApiConnectedListener callback) {
this.mContext = context;
this.mAccount = account;
this.mCallback = callback;
}
protected void connect(String type) {
Log.v(TAG, "Nextcloud Single sign-on connect() called [" + Thread.currentThread().getName() + "]");
if (mDestroyed) {
throw new IllegalStateException("API already destroyed! You cannot reuse a stopped API instance");
}
}
protected abstract InputStream performNetworkRequest(NextcloudRequest request, InputStream requestBodyInputStream) throws Exception;
protected void connectApiWithBackoff() {
new ExponentialBackoff(1000, 10000, 2, 5, Looper.getMainLooper(), () -> {
connect(mAccount.type);
}).start();
}
protected void stop() {
mCallback = null;
mAccount = null;
mDestroyed = true;
}
protected String getAccountName() {
return mAccount.name;
}
protected String getAccountToken() {
return mAccount.token;
}
}
| improve logging (log account type)
| src/main/java/com/nextcloud/android/sso/api/NetworkRequest.java | improve logging (log account type) | <ide><path>rc/main/java/com/nextcloud/android/sso/api/NetworkRequest.java
<ide>
<ide>
<ide> protected void connect(String type) {
<del> Log.v(TAG, "Nextcloud Single sign-on connect() called [" + Thread.currentThread().getName() + "]");
<add> Log.v(TAG, "Nextcloud Single sign-on connect() called [" + Thread.currentThread().getName() + "] Account-Type: [" + type + "]");
<ide> if (mDestroyed) {
<ide> throw new IllegalStateException("API already destroyed! You cannot reuse a stopped API instance");
<ide> } |
|
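/*
 * connectApiWithBackoff() above retries the bind with an exponential backoff
 * (1 s initial delay, 10 s cap, factor 2, 5 attempts). A minimal plain-Java sketch
 * of that retry shape; this is not the library's ExponentialBackoff implementation,
 * and the class and method names here are illustrative.
 */
import java.util.concurrent.TimeUnit;

final class BackoffRetrySketch {
    static void retry(Runnable attempt, long initialDelayMs, long maxDelayMs,
                      double factor, int maxTries) throws InterruptedException {
        long delay = initialDelayMs;
        for (int tries = 1; tries <= maxTries; tries++) {
            try {
                attempt.run();
                return; // success, stop retrying
            } catch (RuntimeException e) {
                if (tries == maxTries) {
                    throw e; // attempts exhausted, surface the failure
                }
                TimeUnit.MILLISECONDS.sleep(delay);
                delay = Math.min((long) (delay * factor), maxDelayMs);
            }
        }
    }
}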
Java | cc0-1.0 | 6f37b6ff39c827195c34c5e1d2bb77b9d58f4d39 | 0 | cmurtheepic/Mizer | package com.spizer.mizer2;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.os.Handler;
import java.util.Random;
public class BasicsPractice extends AppCompatActivity {
/**
* Makes the ProblemSelector class usable in this class.
**/
@SuppressWarnings("unused")
private ProblemSelector PS = new ProblemSelector();
/**
* Makes the DifficultyMenu class usable in this class.
**/
private DifficultyMenu DM = new DifficultyMenu();
private TextView PO;
private TextView C;
private TextView IC;
private TextView S;
Button button1;
Button button2;
Button button3;
Button button4;
Button button5;
Button button6;
Button button7;
Button button8;
Button button9;
private String B1;
private String B2;
private String B3;
private String B4;
private String B5;
private String B6;
private String B7;
private String B8;
private String B9;
public String getB1() {
return B1;
}
public void setB1(String b1) {
B1 = b1;
}
public String getB2() {
return B2;
}
public void setB2(String b2) {
B2 = b2;
}
public String getB3() {
return B3;
}
public void setB3(String b3) {
B3 = b3;
}
public String getB4() {
return B4;
}
public void setB4(String b4) {
B4 = b4;
}
public String getB5() {
return B5;
}
public void setB5(String b5) {
B5 = b5;
}
public String getB6() {
return B6;
}
public void setB6(String b6) {
B6 = b6;
}
public String getB7() {
return B7;
}
public void setB7(String b7) {
B7 = b7;
}
public String getB8() {
return B8;
}
public void setB8(String b8) {
B8 = b8;
}
public String getB9() {
return B9;
}
public void setB9(String b9) {
B9 = b9;
}
private String Output;
@SuppressWarnings("unused")
private Random r;
private int Max1 = 4;
public int AD = DM.getAD();
public int SD = DM.getSD();
public int MD = DM.getMD() + 420;
public int DD = DM.getDD();
public boolean AddP = PS.getAddProb();
public boolean SubP = PS.getSubProb();
public boolean MultiP = PS.getMultiProb();
public boolean DivisP = PS.getDivisProb();
public boolean AChecked = DM.getAChecked();
public boolean SChecked = DM.getSChecked();
public boolean MChecked = DM.getMChecked();
public boolean DChecked = DM.getDChecked();
private boolean AddProbT;
private boolean SubProbT;
private boolean MultiProbT;
private boolean DivisProbT;
public boolean getAddProbT() {
return AddProbT;
}
public void setAddProbT(boolean addProbT) {
AddProbT = addProbT;
}
public boolean getSubProbT() {
return SubProbT;
}
public void setSubProbT(boolean subProbT) {
SubProbT = subProbT;
}
public boolean getMultiProbT() {
return MultiProbT;
}
public void setMultiProbT(boolean multiProbT) {
MultiProbT = multiProbT;
}
public boolean getDivisProbT() {
return DivisProbT;
}
public void setDivisProbT(boolean divisProbT) {
DivisProbT = divisProbT;
}
private int score = 0;
public int getScore() {
return score;
}
public void setScore(int score) {
this.score = score;
}
private int SS1;
public int getSS1() {
return SS1;
}
public void setSS1(int SS1) {
this.SS1 = SS1;
}
private int S1;
public int getS1() {
return S1;
}
public void setS1(int s1) {
S1 = s1;
}
private boolean clicked;
public boolean getClicked() {
return clicked;
}
public void setClicked(boolean clicked) {
this.clicked = clicked;
}
Handler Handler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_basics_practice);
PO = (TextView) findViewById(R.id.problemoutput);
C = (TextView) findViewById(R.id.correctTextView);
IC = (TextView) findViewById(R.id.IncorrectTextView);
S = (TextView) findViewById(R.id.scoreView);
S.setText(Integer.toString(score));
button1 = (Button) findViewById(R.id.button1);
button2 = (Button) findViewById(R.id.button2);
button3 = (Button) findViewById(R.id.button3);
button4 = (Button) findViewById(R.id.button4);
button5 = (Button) findViewById(R.id.button5);
button6 = (Button) findViewById(R.id.button6);
button7 = (Button) findViewById(R.id.button7);
button8 = (Button) findViewById(R.id.button8);
button9 = (Button) findViewById(R.id.button9);
button1.setOnClickListener(myB1);
button2.setOnClickListener(myB2);
button3.setOnClickListener(myB3);
button4.setOnClickListener(myB4);
button5.setOnClickListener(myB5);
button6.setOnClickListener(myB6);
button7.setOnClickListener(myB7);
button8.setOnClickListener(myB8);
button9.setOnClickListener(myB9);
// generateNums();
GenerateOperation();
}
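    /*
     * Illustrative sketch: the nine Button fields and findViewById calls above could
     * be collected into an array, so later code can pick an answer button by index
     * instead of branching on S1. Assumes the same R.id.button1..button9 ids used in
     * activity_basics_practice; this helper is a suggestion, not existing app code.
     */
    private Button[] findAnswerButtons() {
        int[] ids = {R.id.button1, R.id.button2, R.id.button3, R.id.button4, R.id.button5,
                R.id.button6, R.id.button7, R.id.button8, R.id.button9};
        Button[] buttons = new Button[ids.length];
        for (int i = 0; i < ids.length; i++) {
            // Look each button up once and keep it addressable by position 0..8.
            buttons[i] = (Button) findViewById(ids[i]);
        }
        return buttons;
    }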
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_basics_practice, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@SuppressWarnings("unused")
private void generateNums() {
if (AddP && AChecked && !SubP && !MultiP && !DivisP) {
Max1 = 1;
} else if (!AddP && SubP && SChecked && !MultiP && !DivisP) {
Max1 = 1;
} else if (!AddP && !SubP && MultiP && MChecked && !DivisP) {
Max1 = 1;
} else if (!AddP && !SubP && !MultiP && DivisP && DChecked) {
Max1 = 1;
} else if (AddP && AChecked && SubP && SChecked && !MultiP && !DivisP) {
Max1 = 2;
} else if (AddP && AChecked && !SubP && MultiP && MChecked && !DivisP) {
Max1 = 2;
} else if (AddP && AChecked && !SubP && !MultiP && DivisP && DChecked) {
Max1 = 2;
} else if (!AddP && SubP && SChecked && MultiP && MChecked && !DivisP) {
Max1 = 2;
} else if (!AddP && SubP && SChecked && !MultiP && DivisP && DChecked) {
Max1 = 2;
} else if (!AddP && !SubP && MultiP && MChecked && DivisP && DChecked) {
Max1 = 2;
} else if (AddP && AChecked && SubP && SChecked && MultiP && MChecked && !DivisP) {
Max1 = 3;
} else if (AddP && AChecked && SubP && SChecked && !MultiP && DivisP && DChecked) {
Max1 = 3;
} else if (AddP && AChecked && !SubP && MultiP && MChecked && DivisP && DChecked) {
Max1 = 3;
} else if (!AddP && SubP && SChecked && MultiP && MChecked && DivisP && DChecked) {
Max1 = 3;
} else if (AddP && AChecked && SubP && SChecked && MultiP && MChecked && DivisP && DChecked) {
Max1 = 4;
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong at line : 119");
}
Max1 = Max1 + 1;
}
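    /*
     * Illustrative sketch: the chain above effectively counts how many operation
     * types are both selected and enabled, then adds one. A compact approximation
     * using the same fields (the original additionally requires the unchosen problem
     * flags to be false before it accepts a combination), offered as a suggestion:
     */
    private int countEnabledOperations() {
        int enabled = 0;
        if (AddP && AChecked) enabled++;
        if (SubP && SChecked) enabled++;
        if (MultiP && MChecked) enabled++;
        if (DivisP && DChecked) enabled++;
        return enabled + 1; // mirrors the trailing "Max1 = Max1 + 1;"
    }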
/** Randomly picks which operation to present, using a value from 0 up to a bound determined by the operations the user has chosen to practice. **/
/** Also generates two values for the user to add, subtract, multiply, or divide in order to solve the problem. **/
private void GenerateOperation() {
int AS1;
Double d = Math.random() * Max1;
setS1((int) (Math.random() * 9));
int o1 = d.intValue();
Log.d("BasicsPractice.java", "the GenerateOperation function generated : " + o1 + " : as the number to determine the operation to be built");
if (o1 == 0) {
if (AddP) {
setAddProbT(true);
setSubProbT(false);
setMultiProbT(false);
setDivisProbT(false);
Log.d("BasicsPractice.java", "the add difficulty is : " + AD);
int a1 = (int) (Math.random() * DM.getAD());
Log.d("BasicsPractice.java", "the first Addition number generated was: " + a1);
int a2 = (int) (Math.random() * DM.getAD());
Log.d("BasicsPractice.java", "the second Addition number generated was: " + a2);
AS1 = a1 + a2;
Output = a1 + " + " + a2;
Log.d("BasicsPractice.java", "the outputted problem was: " + Output);
if(AS1 < 0) { GenerateOperation(); }
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
Log.d("BasicsPractice.java", "the button number is : " + S1 + 1);
Log.d("BasicsPractice.java", "the button output is : " + AS1);
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 1) {
if (SubP) {
setAddProbT(false);
setSubProbT(true);
setMultiProbT(false);
setDivisProbT(false);
Double d2 = Math.random() * DM.getSD();
int s1 = d2.intValue();
Double d6 = Math.random() * DM.getSD();
int s2 = d6.intValue();
AS1 = s1 - s2;
Output = s1 + " - " + s2;
// if(AS1 < 0) {GenerateOperation();}
if (getS1() == 0) {
B1 = Integer.toString(AS1);
if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
                    if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
                    if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
                    if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
                    if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
                    if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
                    if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 2) {
if (MultiP) {
setAddProbT(false);
setSubProbT(false);
setMultiProbT(true);
setDivisProbT(false);
Double d3 = Math.random() * DM.getMD();
int m1 = d3.intValue();
Double d7 = Math.random() * DM.getMD();
int m2 = d7.intValue();
AS1 = m1 * m2;
Output = m1 + " * " + m2;
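                // defensive guard: with non-negative factors the product should never be negative,
                // so this regeneration path is not expected to trigger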
if(AS1 < 0) {GenerateOperation();}
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 3) {
if (DivisP) {
setAddProbT(false);
setSubProbT(false);
setMultiProbT(false);
setDivisProbT(true);
                Double d4 = Math.random() * DM.getDD();
                int d1 = d4.intValue();
                Double d8 = Math.random() * DM.getDD();
                int d2 = d8.intValue();
                // regenerate when the divisor is zero, larger than the dividend, or does not divide evenly,
                // so the displayed quotient is always a whole number that is at least 1
                if (d2 == 0 || d2 > d1 || d1 % d2 != 0) {
                    GenerateOperation();
                    return;
                }
                AS1 = d1 / d2;
                Output = d1 + " / " + d2;
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
}
}
    /** this outputs the value of the String *Output* to the problem text field on screen **/
    /** this is the problem the user must solve to get either a correct or an incorrect answer **/
private void OutputToProblemView() {
PO.setText(Output);
}
    /** this outputs the values of the B(num) String variables to the answer buttons that are in the UI **/
    /** the button at the chosen answer slot will show the correct answer **/
private void OutputToAnswerSelection() {
        button1.setText(B1);
        button2.setText(B2);
        button3.setText(B3);
        button4.setText(B4);
        button5.setText(B5);
        button6.setText(B6);
        button7.setText(B7);
        button8.setText(B8);
        button9.setText(B9);
}
    /** called when the user clicks the Skip button **/
    /** it triggers the generation of a new problem for the user to solve **/
    /** as well as a new randomly placed set of answer choices **/
public void Skip(@SuppressWarnings("UnusedParameters") View view) {
GenerateOperation();
}
    /** these listeners wait for one of the answer buttons on screen to be clicked **/
    /** they then determine whether the correct or the incorrect on-screen text needs to be displayed **/
    /** and clear it again after a delay given in milliseconds **/
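    /** each listener compares its button's position against S1, the randomly chosen slot that holds the correct answer **/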
View.OnClickListener myB1 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 0) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB2 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 1) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB3 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 2) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB4 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 3) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB5 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 4) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB6 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 5) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB7 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 6) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB8 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 7) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB9 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 8) {
setClicked(true);
C.setEnabled(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
    /** this is called when the user has clicked a correct answer **/
private Runnable Correct1 = new Runnable() {
@Override
public void run() {
C.setEnabled(false);
setClicked(false);
ScoreUpdate();
}
};
/** this is called when the user has clicked an incorrect answer **/
private Runnable InCorrect1 = new Runnable() {
@Override
public void run() {
IC.setEnabled(false);
setClicked(false);
GenerateOperation();
}
};
/** this function updates the score displayed on screen **/
private void ScoreUpdate() {
setScore(getScore()+1);
S.setText(Integer.toString(getScore()));
GenerateOperation();
}
} | app/src/main/java/com/spizer/mizer2/BasicsPractice.java | package com.spizer.mizer2;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.os.Handler;
import java.util.Random;
public class BasicsPractice extends AppCompatActivity {
/**
* calls to make the class ProblemSelector usable in this class
**/
@SuppressWarnings("unused")
private ProblemSelector PS = new ProblemSelector();
/**
* calls to make the class DifficultyMenu usable in this class
**/
private DifficultyMenu DM = new DifficultyMenu();
private TextView PO;
private TextView C;
private TextView IC;
private TextView S;
Button button1;
Button button2;
Button button3;
Button button4;
Button button5;
Button button6;
Button button7;
Button button8;
Button button9;
private String B1;
private String B2;
private String B3;
private String B4;
private String B5;
private String B6;
private String B7;
private String B8;
private String B9;
public String getB1() {
return B1;
}
public void setB1(String b1) {
B1 = b1;
}
public String getB2() {
return B2;
}
public void setB2(String b2) {
B2 = b2;
}
public String getB3() {
return B3;
}
public void setB3(String b3) {
B3 = b3;
}
public String getB4() {
return B4;
}
public void setB4(String b4) {
B4 = b4;
}
public String getB5() {
return B5;
}
public void setB5(String b5) {
B5 = b5;
}
public String getB6() {
return B6;
}
public void setB6(String b6) {
B6 = b6;
}
public String getB7() {
return B7;
}
public void setB7(String b7) {
B7 = b7;
}
public String getB8() {
return B8;
}
public void setB8(String b8) {
B8 = b8;
}
public String getB9() {
return B9;
}
public void setB9(String b9) {
B9 = b9;
}
private String Output;
@SuppressWarnings("unused")
private Random r;
private int Max1 = 4;
public int AD = DM.getAD();
public int SD = DM.getSD();
public int MD = DM.getMD() + 420;
public int DD = DM.getDD();
public boolean AddP = PS.getAddProb();
public boolean SubP = PS.getSubProb();
public boolean MultiP = PS.getMultiProb();
public boolean DivisP = PS.getDivisProb();
public boolean AChecked = DM.getAChecked();
public boolean SChecked = DM.getSChecked();
public boolean MChecked = DM.getMChecked();
public boolean DChecked = DM.getDChecked();
private boolean AddProbT;
private boolean SubProbT;
private boolean MultiProbT;
private boolean DivisProbT;
public boolean getAddProbT() {
return AddProbT;
}
public void setAddProbT(boolean addProbT) {
AddProbT = addProbT;
}
public boolean getSubProbT() {
return SubProbT;
}
public void setSubProbT(boolean subProbT) {
SubProbT = subProbT;
}
public boolean getMultiProbT() {
return MultiProbT;
}
public void setMultiProbT(boolean multiProbT) {
MultiProbT = multiProbT;
}
public boolean getDivisProbT() {
return DivisProbT;
}
public void setDivisProbT(boolean divisProbT) {
DivisProbT = divisProbT;
}
private int score = 0;
public int getScore() {
return score;
}
public void setScore(int score) {
this.score = score;
}
private int SS1;
public int getSS1() {
return SS1;
}
public void setSS1(int SS1) {
this.SS1 = SS1;
}
private int S1;
public int getS1() {
return S1;
}
public void setS1(int s1) {
S1 = s1;
}
private boolean clicked;
public boolean getClicked() {
return clicked;
}
public void setClicked(boolean clicked) {
this.clicked = clicked;
}
Handler Handler = new Handler();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_basics_practice);
PO = (TextView) findViewById(R.id.problemoutput);
C = (TextView) findViewById(R.id.correctTextView);
IC = (TextView) findViewById(R.id.IncorrectTextView);
S = (TextView) findViewById(R.id.scoreView);
S.setText(Integer.toString(score));
button1 = (Button) findViewById(R.id.button1);
button2 = (Button) findViewById(R.id.button2);
button3 = (Button) findViewById(R.id.button3);
button4 = (Button) findViewById(R.id.button4);
button5 = (Button) findViewById(R.id.button5);
button6 = (Button) findViewById(R.id.button6);
button7 = (Button) findViewById(R.id.button7);
button8 = (Button) findViewById(R.id.button8);
button9 = (Button) findViewById(R.id.button9);
button1.setOnClickListener(myB1);
button2.setOnClickListener(myB2);
button3.setOnClickListener(myB3);
button4.setOnClickListener(myB4);
button5.setOnClickListener(myB5);
button6.setOnClickListener(myB6);
button7.setOnClickListener(myB7);
button8.setOnClickListener(myB8);
button9.setOnClickListener(myB9);
// generateNums();
GenerateOperation();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_basics_practice, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
@SuppressWarnings("unused")
private void generateNums() {
if (AddP && AChecked && !SubP && !MultiP && !DivisP) {
Max1 = 1;
} else if (!AddP && SubP && SChecked && !MultiP && !DivisP) {
Max1 = 1;
} else if (!AddP && !SubP && MultiP && MChecked && !DivisP) {
Max1 = 1;
} else if (!AddP && !SubP && !MultiP && DivisP && DChecked) {
Max1 = 1;
} else if (AddP && AChecked && SubP && SChecked && !MultiP && !DivisP) {
Max1 = 2;
} else if (AddP && AChecked && !SubP && MultiP && MChecked && !DivisP) {
Max1 = 2;
} else if (AddP && AChecked && !SubP && !MultiP && DivisP && DChecked) {
Max1 = 2;
} else if (!AddP && SubP && SChecked && MultiP && MChecked && !DivisP) {
Max1 = 2;
} else if (!AddP && SubP && SChecked && !MultiP && DivisP && DChecked) {
Max1 = 2;
} else if (!AddP && !SubP && MultiP && MChecked && DivisP && DChecked) {
Max1 = 2;
} else if (AddP && AChecked && SubP && SChecked && MultiP && MChecked && !DivisP) {
Max1 = 3;
} else if (AddP && AChecked && SubP && SChecked && !MultiP && DivisP && DChecked) {
Max1 = 3;
} else if (AddP && AChecked && !SubP && MultiP && MChecked && DivisP && DChecked) {
Max1 = 3;
} else if (!AddP && SubP && SChecked && MultiP && MChecked && DivisP && DChecked) {
Max1 = 3;
} else if (AddP && AChecked && SubP && SChecked && MultiP && MChecked && DivisP && DChecked) {
Max1 = 4;
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong at line : 119");
}
Max1 = Max1 + 1;
}
/** randomly generates a operation to do from a value of 0-value determined by operations the user has chosen to practice **/
/** also generates 2 values for the user to add, subtract, multiply, or divide. to solve the problem **/
private void GenerateOperation() {
int AS1;
Double d = Math.random() * Max1;
setS1((int) (Math.random() * 9));
int o1 = d.intValue();
Log.d("BasicsPractice.java", "the GenerateOperation function generated : " + o1 + " : as the number to determine the operation to be built");
if (o1 == 0) {
if (AddP) {
setAddProbT(true);
setSubProbT(false);
setMultiProbT(false);
setDivisProbT(false);
Log.d("BasicsPractice.java", "the add difficulty is : " + AD);
int a1 = (int) (Math.random() * DM.getAD());
Log.d("BasicsPractice.java", "the first Addition number generated was: " + a1);
int a2 = (int) (Math.random() * DM.getAD());
Log.d("BasicsPractice.java", "the second Addition number generated was: " + a2);
AS1 = a1 + a2;
Output = a1 + " + " + a2;
Log.d("BasicsPractice.java", "the outputted problem was: " + Output);
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B7 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B6 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B5 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B4 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B3 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
B1 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
Log.d("BasicsPractice.java", "the button number is : " + S1 + 1);
Log.d("BasicsPractice.java", "the button output is : " + AS1);
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 1) {
if (SubP) {
setAddProbT(false);
setSubProbT(true);
setMultiProbT(false);
setDivisProbT(false);
Double d2 = Math.random() * DM.getSD();
int s1 = d2.intValue();
Double d6 = Math.random() * DM.getSD();
int s2 = d6.intValue();
AS1 = s1 - s2;
Output = s1 + " - " + s2;
// if(AS1 < 0) {GenerateOperation();}
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * DM.getSD()));
B7 = Integer.toString((int) (Math.random() * DM.getSD()));
B6 = Integer.toString((int) (Math.random() * DM.getSD()));
B5 = Integer.toString((int) (Math.random() * DM.getSD()));
B4 = Integer.toString((int) (Math.random() * DM.getSD()));
B3 = Integer.toString((int) (Math.random() * DM.getSD()));
B2 = Integer.toString((int) (Math.random() * DM.getSD()));
B1 = Integer.toString((int) (Math.random() * DM.getSD()));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 2) {
if (MultiP) {
setAddProbT(false);
setSubProbT(false);
setMultiProbT(true);
setDivisProbT(false);
Double d3 = Math.random() * DM.getMD();
int m1 = d3.intValue();
Double d7 = Math.random() * DM.getMD();
int m2 = d7.intValue();
AS1 = m1 * m2;
Output = m1 + " * " + m2;
if(AS1 < 0) {GenerateOperation();}
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B9 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * MD));
B8 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * MD));
B7 = Integer.toString((int) (Math.random() * MD));
B6 = Integer.toString((int) (Math.random() * MD));
B5 = Integer.toString((int) (Math.random() * MD));
B4 = Integer.toString((int) (Math.random() * MD));
B3 = Integer.toString((int) (Math.random() * MD));
B2 = Integer.toString((int) (Math.random() * MD));
B1 = Integer.toString((int) (Math.random() * MD));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
} else if (o1 == 3) {
if (DivisP) {
setAddProbT(false);
setSubProbT(false);
setMultiProbT(false);
setDivisProbT(true);
Double d4 = Math.random() * DM.getDD();
int d1 = d4.intValue();
Double d8 = Math.random() * DM.getDD();
int d2 = d8.intValue();
AS1 = d1 / d2;
Output = d1 + " / " + d2;
if (getS1() == 0) {
B1 = Integer.toString(AS1);
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 1) {
B2 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 2) {
B3 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 3) {
B4 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 4) {
B5 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 5) {
B6 = Integer.toString(AS1);
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 6) {
B7 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 7) {
B8 = Integer.toString(AS1);
B9 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else if (S1 == 8) {
B9 = Integer.toString(AS1);
B8 = Integer.toString((int) (Math.random() * DM.getDD()));
B7 = Integer.toString((int) (Math.random() * DM.getDD()));
B6 = Integer.toString((int) (Math.random() * DM.getDD()));
B5 = Integer.toString((int) (Math.random() * DM.getDD()));
B4 = Integer.toString((int) (Math.random() * DM.getDD()));
B3 = Integer.toString((int) (Math.random() * DM.getDD()));
B2 = Integer.toString((int) (Math.random() * DM.getDD()));
B1 = Integer.toString((int) (Math.random() * DM.getDD()));
} else {
Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
}
OutputToAnswerSelection();
OutputToProblemView();
}
}
}
/** this outputs the value of the String *Output* to the Output text field on screen **/
/** this is the problem the user must solver to either get a correct or incorrect answer **/
private void OutputToProblemView() {
PO.setText(Output);
}
/** this outputs the value of the B(num) int variables to the desired button on screen that in the Ui **/
/** the desired button will be the correct answer **/
private void OutputToAnswerSelection() {
button1.setText(B1);
button2.setText(B2);
button3.setText(B3);
button4.setText(B4);
button5.setText(B5);
button6.setText(B6);
button7.setText(B7);
button8.setText(B8);
button9.setText(B9);
try {
button1.setText(B1);
} catch (Exception e) {
e.printStackTrace();
}
try {
button2.setText(B2);
} catch (Exception e) {
e.printStackTrace();
}
try {
button3.setText(B3);
} catch (Exception e) {
e.printStackTrace();
}
try {
button4.setText(B4);
} catch (Exception e) {
e.printStackTrace();
}
try {
button5.setText(B5);
} catch (Exception e) {
e.printStackTrace();
}
try {
button6.setText(B6);
} catch (Exception e) {
e.printStackTrace();
}
try {
button7.setText(B7);
} catch (Exception e) {
e.printStackTrace();
}
try {
button8.setText(B8);
} catch (Exception e) {
e.printStackTrace();
}
try {
button9.setText(B9);
} catch (Exception e) {
e.printStackTrace();
}
}
/** is called when the user clicks the Skip button **/
/** it causes the generation of a new problem for the user to solve **/
/** as well as a new random answer for the user to solve **/
public void Skip(@SuppressWarnings("UnusedParameters") View view) {
GenerateOperation();
}
/** these functions listen for one of the buttons on screen to be clicked **/
/** they then determine what needs to be displayed the correct or incorrect on screen text **/
/** then they do so with a delay determined in milliseconds **/
View.OnClickListener myB1 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 0) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB2 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 1) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB3 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 2) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB4 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 3) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB5 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 4) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB6 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 5) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB7 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 6) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB8 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 7) {
C.setEnabled(true);
setClicked(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
View.OnClickListener myB9 = new View.OnClickListener() {
@Override
public void onClick(View v) {
if(getClicked()) {
if (getS1() == 8) {
setClicked(true);
C.setEnabled(true);
Handler.postDelayed(Correct1, 1750);
} else {
IC.setEnabled(true);
setClicked(true);
Handler.postDelayed(InCorrect1, 1750);
}
}
}
};
    /** this is called when the user has clicked a correct answer **/
private Runnable Correct1 = new Runnable() {
@Override
public void run() {
C.setEnabled(false);
setClicked(false);
ScoreUpdate();
}
};
/** this is called when the user has clicked an incorrect answer **/
private Runnable InCorrect1 = new Runnable() {
@Override
public void run() {
IC.setEnabled(false);
setClicked(false);
GenerateOperation();
}
};
/** this function updates the score displayed on screen **/
private void ScoreUpdate() {
setScore(getScore()+1);
S.setText(Integer.toString(getScore()));
GenerateOperation();
}
} | minor fixes
- added the generation of fake negative answers on subtraction problems
- fixed a bug where the program would generate division problems whose
answers were not whole numbers or were less than 1
| app/src/main/java/com/spizer/mizer2/BasicsPractice.java | minor fixes | <ide><path>pp/src/main/java/com/spizer/mizer2/BasicsPractice.java
<ide> AS1 = a1 + a2;
<ide> Output = a1 + " + " + a2;
<ide> Log.d("BasicsPractice.java", "the outputted problem was: " + Output);
<add> if(AS1 < 0) { GenerateOperation(); }
<ide> if (getS1() == 0) {
<ide> B1 = Integer.toString(AS1);
<ide> B2 = Integer.toString((int) (Math.random() * DM.getAD() + DM.getAD()));
<ide> // if(AS1 < 0) {GenerateOperation();}
<ide> if (getS1() == 0) {
<ide> B1 = Integer.toString(AS1);
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 1) {
<ide> B2 = Integer.toString(AS1);
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 2) {
<ide> B3 = Integer.toString(AS1);
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 3) {
<ide> B4 = Integer.toString(AS1);
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 4) {
<ide> B5 = Integer.toString(AS1);
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 5) {
<ide> B6 = Integer.toString(AS1);
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 6) {
<ide> B7 = Integer.toString(AS1);
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 7) {
<ide> B8 = Integer.toString(AS1);
<del> B9 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B9 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B9 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else if (S1 == 8) {
<ide> B9 = Integer.toString(AS1);
<del> B8 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B7 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B6 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B5 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B4 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B3 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B2 = Integer.toString((int) (Math.random() * DM.getSD()));
<del> B1 = Integer.toString((int) (Math.random() * DM.getSD()));
<add> if((int) (Math.random() * 2) == 1) {B1 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B1 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B2 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B2 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B3 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B3 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B4 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B4 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B5 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B5 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B6 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B6 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B7 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B7 = Integer.toString((int) (Math.random() * DM.getSD()));}
<add> if((int) (Math.random() * 2) == 1) {B8 = "-" + Integer.toString((int) (Math.random() * DM.getSD()));} else {B8 = Integer.toString((int) (Math.random() * DM.getSD()));}
<ide> } else {
<ide> Log.e("BasicsPractice.java", "Something went horribly wrong on line : 160-181");
<ide> }
<ide> int d2 = d8.intValue();
<ide> AS1 = d1 / d2;
<ide> Output = d1 + " / " + d2;
<add> if(AS1 < 0) { GenerateOperation(); }
<add> if(d2 > d1) { GenerateOperation(); }
<ide> if (getS1() == 0) {
<ide> B1 = Integer.toString(AS1);
<ide> B2 = Integer.toString((int) (Math.random() * DM.getDD())); |
|
JavaScript | mit | 3fc695ecbc1f2aa983044adf3b33c50d43a5bd01 | 0 | frdmn/grunt-anybar | /*
* grunt-anybar
* https://github.com/frdmn/grunt-anybar
*
* Copyright (c) 2015 Jonas Friedmann
* Licensed under the MIT license.
*/
'use strict';
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
jshint: {
all: [
'Gruntfile.js',
'tasks/*.js',
'<%= nodeunit.tests %>'
],
options: {
jshintrc: '.jshintrc'
}
},
// Before generating any new files, remove any previously-created files.
clean: {
tests: ['tmp']
},
// Configuration to be run (and then tested).
anybar: {
default_options: {
options: {
status: 'green'
}
},
custom_options: {
options: {
port: 1736,
status: 'green'
}
}
},
// Unit tests.
nodeunit: {
tests: ['test/*_test.js']
}
});
// Actually load this plugin's task(s).
grunt.loadTasks('tasks');
// These plugins provide necessary tasks.
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-nodeunit');
// Whenever the "test" task is run, first clean the "tmp" dir, then run this
// plugin's task(s), then test the result.
grunt.registerTask('test', ['clean', 'anybar', 'nodeunit']);
// By default, lint and run all tests.
grunt.registerTask('default', ['jshint', 'test']);
};
| Gruntfile.js | /*
* grunt-anybar
* https://github.com/frdmn/grunt-anybar
*
* Copyright (c) 2015 Jonas Friedmann
* Licensed under the MIT license.
*/
'use strict';
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
jshint: {
all: [
'Gruntfile.js',
'tasks/*.js',
'<%= nodeunit.tests %>'
],
options: {
jshintrc: '.jshintrc'
}
},
// Before generating any new files, remove any previously-created files.
clean: {
tests: ['tmp']
},
// Configuration to be run (and then tested).
anybar: {
default_options: {
options: {
},
files: {
'tmp/default_options': ['test/fixtures/testing', 'test/fixtures/123']
}
},
custom_options: {
options: {
separator: ': ',
punctuation: ' !!!'
},
files: {
'tmp/custom_options': ['test/fixtures/testing', 'test/fixtures/123']
}
}
},
// Unit tests.
nodeunit: {
tests: ['test/*_test.js']
}
});
// Actually load this plugin's task(s).
grunt.loadTasks('tasks');
// These plugins provide necessary tasks.
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-nodeunit');
// Whenever the "test" task is run, first clean the "tmp" dir, then run this
// plugin's task(s), then test the result.
grunt.registerTask('test', ['clean', 'anybar', 'nodeunit']);
// By default, lint and run all tests.
grunt.registerTask('default', ['jshint', 'test']);
};
| Adjust test tasks
| Gruntfile.js | Adjust test tasks | <ide><path>runtfile.js
<ide> anybar: {
<ide> default_options: {
<ide> options: {
<del> },
<del> files: {
<del> 'tmp/default_options': ['test/fixtures/testing', 'test/fixtures/123']
<add> status: 'green'
<ide> }
<ide> },
<ide> custom_options: {
<ide> options: {
<del> separator: ': ',
<del> punctuation: ' !!!'
<del> },
<del> files: {
<del> 'tmp/custom_options': ['test/fixtures/testing', 'test/fixtures/123']
<add> port: 1736,
<add> status: 'green'
<ide> }
<ide> }
<ide> },
<ide> nodeunit: {
<ide> tests: ['test/*_test.js']
<ide> }
<del>
<ide> });
<ide>
<ide> // Actually load this plugin's task(s). |
|
Java | apache-2.0 | 000eedc057e305377aea3190935343119a87e1b5 | 0 | ruediste/salta | package com.github.ruediste.salta.standard.util;
import java.util.Map;
import java.util.Optional;
import com.github.ruediste.salta.core.Binding;
import com.github.ruediste.salta.standard.ScopeImpl.ScopeHandler;
import com.google.common.collect.Maps;
public abstract class SimpleScopeManagerBase implements ScopeHandler {
protected final ThreadLocal<ScopeState> currentState = new ThreadLocal<>();
protected final String scopeName;
public SimpleScopeManagerBase(String scopeName) {
this.scopeName = scopeName;
}
public static class ScopeState {
Map<Binding, Object> data = Maps.newHashMap();
ScopeState() {
}
}
public void inScopeDo(ScopeState state, Runnable runnable) {
ScopeState old = setState(state);
try {
runnable.run();
} finally {
setState(old);
}
}
public Optional<Map<Binding, Object>> tryGetValueMap() {
return Optional.ofNullable(currentState.get()).map(d -> d.data);
}
/**
* Return the current value map.
*
* @throws RuntimeException
* if no scope is active
*/
public Map<Binding, Object> getValueMap() {
return tryGetValueMap()
.orElseThrow(() -> new RuntimeException("Cannot access value map outside of scope " + scopeName));
}
/**
* Set a fresh scope state.
*
* @return the old scope state
*/
public ScopeState setFreshState() {
return setState(createFreshState());
}
public ScopeState createFreshState() {
return new ScopeState();
}
public boolean isScopeActive() {
return currentState.get() != null;
}
/**
* Set a given scope state
*
* @param state
* state to set, or null to set to an empty state
* @return the old scope state
*/
public ScopeState setState(ScopeState state) {
ScopeState old = currentState.get();
if (state == null)
currentState.remove();
else
currentState.set(state);
return old;
}
} | core/src/main/java/com/github/ruediste/salta/standard/util/SimpleScopeManagerBase.java | package com.github.ruediste.salta.standard.util;
import java.util.Map;
import java.util.Optional;
import com.github.ruediste.salta.core.Binding;
import com.github.ruediste.salta.standard.ScopeImpl.ScopeHandler;
import com.google.common.collect.Maps;
public abstract class SimpleScopeManagerBase implements ScopeHandler {
protected final ThreadLocal<ScopeState> currentState = new ThreadLocal<>();
protected final String scopeName;
public SimpleScopeManagerBase(String scopeName) {
this.scopeName = scopeName;
}
public static class ScopeState {
Map<Binding, Object> data = Maps.newHashMap();
ScopeState() {
}
}
public void inScopeDo(ScopeState state, Runnable runnable) {
ScopeState old = setState(state);
try {
runnable.run();
} finally {
setState(old);
}
}
public Optional<Map<Binding, Object>> tryGetValueMap() {
return Optional.ofNullable(currentState.get()).map(d -> d.data);
}
/**
* Return the current value map.
*
* @throws RuntimeException
* if no scope is active
*/
public Map<Binding, Object> getValueMap() {
return tryGetValueMap()
.orElseThrow(() -> new RuntimeException("Cannot access value map outside of scope " + scopeName));
}
/**
* Set a fresh scope state.
*
* @return the old scope state
*/
public ScopeState setFreshState() {
return setState(createFreshState());
}
public ScopeState createFreshState() {
return new ScopeState();
}
/**
* Set a given scope state
*
* @param state
* state to set, or null to set to an empty state
* @return the old scope state
*/
public ScopeState setState(ScopeState state) {
ScopeState old = currentState.get();
if (state == null)
currentState.remove();
else
currentState.set(state);
return old;
}
} | add isScopeActive
| core/src/main/java/com/github/ruediste/salta/standard/util/SimpleScopeManagerBase.java | add isScopeActive | <ide><path>ore/src/main/java/com/github/ruediste/salta/standard/util/SimpleScopeManagerBase.java
<ide>
<ide> public abstract class SimpleScopeManagerBase implements ScopeHandler {
<ide>
<del> protected final ThreadLocal<ScopeState> currentState = new ThreadLocal<>();
<del> protected final String scopeName;
<add> protected final ThreadLocal<ScopeState> currentState = new ThreadLocal<>();
<add> protected final String scopeName;
<ide>
<del> public SimpleScopeManagerBase(String scopeName) {
<del> this.scopeName = scopeName;
<del> }
<add> public SimpleScopeManagerBase(String scopeName) {
<add> this.scopeName = scopeName;
<add> }
<ide>
<del> public static class ScopeState {
<del> Map<Binding, Object> data = Maps.newHashMap();
<add> public static class ScopeState {
<add> Map<Binding, Object> data = Maps.newHashMap();
<ide>
<del> ScopeState() {
<del> }
<del> }
<add> ScopeState() {
<add> }
<add> }
<ide>
<del> public void inScopeDo(ScopeState state, Runnable runnable) {
<del> ScopeState old = setState(state);
<del> try {
<del> runnable.run();
<del> } finally {
<del> setState(old);
<del> }
<del> }
<add> public void inScopeDo(ScopeState state, Runnable runnable) {
<add> ScopeState old = setState(state);
<add> try {
<add> runnable.run();
<add> } finally {
<add> setState(old);
<add> }
<add> }
<ide>
<del> public Optional<Map<Binding, Object>> tryGetValueMap() {
<del> return Optional.ofNullable(currentState.get()).map(d -> d.data);
<del> }
<add> public Optional<Map<Binding, Object>> tryGetValueMap() {
<add> return Optional.ofNullable(currentState.get()).map(d -> d.data);
<add> }
<ide>
<del> /**
<del> * Return the current value map.
<del> *
<del> * @throws RuntimeException
<del> * if no scope is active
<del> */
<del> public Map<Binding, Object> getValueMap() {
<del> return tryGetValueMap()
<del> .orElseThrow(() -> new RuntimeException("Cannot access value map outside of scope " + scopeName));
<del> }
<add> /**
<add> * Return the current value map.
<add> *
<add> * @throws RuntimeException
<add> * if no scope is active
<add> */
<add> public Map<Binding, Object> getValueMap() {
<add> return tryGetValueMap()
<add> .orElseThrow(() -> new RuntimeException("Cannot access value map outside of scope " + scopeName));
<add> }
<ide>
<del> /**
<del> * Set a fresh scope state.
<del> *
<del> * @return the old scope state
<del> */
<del> public ScopeState setFreshState() {
<del> return setState(createFreshState());
<del> }
<add> /**
<add> * Set a fresh scope state.
<add> *
<add> * @return the old scope state
<add> */
<add> public ScopeState setFreshState() {
<add> return setState(createFreshState());
<add> }
<ide>
<del> public ScopeState createFreshState() {
<del> return new ScopeState();
<del> }
<add> public ScopeState createFreshState() {
<add> return new ScopeState();
<add> }
<ide>
<del> /**
<del> * Set a given scope state
<del> *
<del> * @param state
<del> * state to set, or null to set to an empty state
<del> * @return the old scope state
<del> */
<del> public ScopeState setState(ScopeState state) {
<del> ScopeState old = currentState.get();
<del> if (state == null)
<del> currentState.remove();
<del> else
<del> currentState.set(state);
<del> return old;
<del> }
<add> public boolean isScopeActive() {
<add> return currentState.get() != null;
<add> }
<add>
<add> /**
<add> * Set a given scope state
<add> *
<add> * @param state
<add> * state to set, or null to set to an empty state
<add> * @return the old scope state
<add> */
<add> public ScopeState setState(ScopeState state) {
<add> ScopeState old = currentState.get();
<add> if (state == null)
<add> currentState.remove();
<add> else
<add> currentState.set(state);
<add> return old;
<add> }
<ide> } |
|
Java | mit | 0a14c148116822bce296ddb0fed0b6c4c4294884 | 0 | elBukkit/MagicLib,elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin | package com.elmakers.mine.bukkit.spell;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.command.BlockCommandSender;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.potion.PotionEffect;
import org.bukkit.util.BlockIterator;
import org.bukkit.util.Vector;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.spell.TargetType;
import com.elmakers.mine.bukkit.block.MaterialAndData;
import com.elmakers.mine.bukkit.block.MaterialBrush;
import com.elmakers.mine.bukkit.block.batch.BlockAction;
import com.elmakers.mine.bukkit.utility.ConfigurationUtils;
import com.elmakers.mine.bukkit.utility.Target;
public abstract class TargetingSpell extends BaseSpell {
private static final int MAX_RANGE = 511;
private Target target = null;
private String targetName = null;
private TargetType targetType = TargetType.OTHER;
private boolean targetNPCs = false;
private int verticalSearchDistance = 8;
private boolean targetingComplete = false;
private boolean targetSpaceRequired = false;
private Class<? extends Entity> targetEntityType = null;
private Location targetLocation;
private Vector targetLocationOffset;
private String targetLocationWorldName;
protected Location targetLocation2;
private Entity targetEntity = null;
private boolean bypassBuildRestriction = false;
private boolean allowMaxRange = false;
private int range = 32;
private Set<Material> targetThroughMaterials = new HashSet<Material>();
private boolean reverseTargeting = false;
private BlockIterator blockIterator = null;
private Block currentBlock = null;
private Block previousBlock = null;
private Block previousPreviousBlock = null;
private boolean pvpRestricted = false;
private boolean bypassPvpRestriction = false;
@Override
protected void preCast()
{
super.preCast();
initializeTargeting();
}
protected void initializeTargeting()
{
blockIterator = null;
targetSpaceRequired = false;
reverseTargeting = false;
targetingComplete = false;
}
public void setTargetType(TargetType t) {
this.targetType = t;
if (target != null) {
target = null;
initializeTargeting();
}
}
protected String getTargetName(Entity target)
{
if (target instanceof Player)
{
return ((Player)target).getName();
}
if (controller.isElemental(target))
{
return "Elemental";
}
return target.getType().name().toLowerCase().replace('_', ' ');
}
public String getMessage(String messageKey, String def) {
String message = super.getMessage(messageKey, def);
// Escape targeting parameters
String useTargetName = targetName;
if (useTargetName == null) {
if (target != null && target.hasEntity()) {
useTargetName = getTargetName(target.getEntity());
}
else {
useTargetName = "Unknown";
}
}
message = message.replace("$target", useTargetName);
return message;
}
protected void setTargetName(String name) {
targetName = name;
}
public void targetThrough(Material mat)
{
targetThroughMaterials.add(mat);
}
public void targetThrough(Set<Material> mat)
{
targetThroughMaterials.clear();
targetThroughMaterials.addAll(mat);
}
public void noTargetThrough(Material mat)
{
targetThroughMaterials.remove(mat);
}
public boolean isTargetable(Material mat)
{
if (!allowPassThrough(mat)) {
return true;
}
boolean targetThrough = targetThroughMaterials.contains(mat);
if (reverseTargeting)
{
return(targetThrough);
}
return !targetThrough;
}
public void setReverseTargeting(boolean reverse)
{
reverseTargeting = reverse;
}
public boolean isReverseTargeting()
{
return reverseTargeting;
}
public void setTargetSpaceRequired()
{
targetSpaceRequired = true;
}
public void setTarget(Location location) {
target = new Target(getLocation(), location.getBlock());
}
public boolean hasBuildPermission(Block block)
{
if (bypassBuildRestriction) return true;
return mage.hasBuildPermission(block);
}
protected void offsetTarget(int dx, int dy, int dz) {
Location location = getLocation();
if (location == null) {
return;
}
location.add(dx, dy, dz);
initializeBlockIterator(location);
}
protected boolean initializeBlockIterator(Location location) {
if (location.getBlockY() < 0) {
location = location.clone();
location.setY(0);
}
if (location.getBlockY() > controller.getMaxY()) {
location = location.clone();
location.setY(controller.getMaxY());
}
try {
blockIterator = new BlockIterator(location, VIEW_HEIGHT, getMaxRange());
} catch (Exception ex) {
// This seems to happen randomly, like when you use the same target.
// Very annoying, and I now kind of regret switching to BlockIterator.
// At any rate, we're going to just re-use the last target block and
// cross our fingers!
return false;
}
return true;
}
/**
* Move "steps" forward along line of vision and returns the block there
*
* @return The block at the new location
*/
protected Block getNextBlock()
{
previousPreviousBlock = previousBlock;
previousBlock = currentBlock;
if (blockIterator == null || !blockIterator.hasNext()) {
currentBlock = null;
} else {
currentBlock = blockIterator.next();
}
return currentBlock;
}
/**
* Returns the current block along the line of vision
*
* @return The block
*/
public Block getCurBlock()
{
return currentBlock;
}
/**
* Returns the previous block along the line of vision
*
* @return The block
*/
public Block getPreviousBlock()
{
return previousBlock;
}
public TargetType getTargetType()
{
return targetType;
}
protected Target getTarget()
{
target = findTarget();
if (targetLocationOffset != null) {
target.add(targetLocationOffset);
}
if (targetLocationWorldName != null && targetLocationWorldName.length() > 0) {
Location location = target.getLocation();
if (location != null) {
World targetWorld = location.getWorld();
target.setWorld(ConfigurationUtils.overrideWorld(targetLocationWorldName, targetWorld, controller.canCreateWorlds()));
}
}
return target;
}
/**
* Returns the block at the cursor, or null if out of range
*
* @return The target block
*/
public Target findTarget()
{
if (targetType != TargetType.NONE && targetType != TargetType.BLOCK && targetEntity != null) {
return new Target(getLocation(), targetEntity);
}
Player player = getPlayer();
if (targetType == TargetType.SELF && player != null) {
return new Target(getLocation(), player);
}
CommandSender sender = mage.getCommandSender();
if (targetType == TargetType.SELF && player == null && sender != null && (sender instanceof BlockCommandSender)) {
BlockCommandSender commandBlock = (BlockCommandSender)mage.getCommandSender();
return new Target(commandBlock.getBlock().getLocation(), commandBlock.getBlock());
}
Location location = getLocation();
if (targetType == TargetType.SELF && location != null) {
return new Target(location, location.getBlock());
}
if (targetType == TargetType.SELF) {
return new Target(location);
}
if (targetType != TargetType.NONE && targetLocation != null) {
return new Target(getLocation(), targetLocation.getBlock());
}
if (targetType == TargetType.NONE) {
return new Target(getLocation());
}
findTargetBlock();
Block block = getCurBlock();
if (targetType == TargetType.BLOCK) {
return new Target(getLocation(), block);
}
Target targetBlock = block == null ? null : new Target(getLocation(), block);
Target targetEntity = getEntityTarget();
// Don't allow targeting entities in no-PVP areas.
boolean noPvp = targetEntity != null && (targetEntity instanceof Player) && pvpRestricted && !bypassPvpRestriction && !controller.isPVPAllowed(targetEntity.getLocation());
if (noPvp) {
targetEntity = null;
            // Don't let them target the block, either.
targetBlock = null;
}
if (targetEntity == null && targetType == TargetType.ANY && player != null) {
return new Target(getLocation(), player, targetBlock == null ? null : targetBlock.getBlock());
}
if (targetBlock != null && targetEntity != null) {
if (targetBlock.getDistance() < targetEntity.getDistance()) {
targetEntity = null;
} else {
targetBlock = null;
}
}
if (targetEntity != null) {
return targetEntity;
} else if (targetBlock != null) {
return targetBlock;
}
return new Target(getLocation());
}
public Target getCurrentTarget()
{
if (target == null) {
target = new Target(getLocation());
}
return target;
}
public void clearTarget()
{
target = null;
}
public Block getTargetBlock()
{
return getTarget().getBlock();
}
protected Target getEntityTarget()
{
if (targetEntityType == null) return null;
List<Target> scored = getAllTargetEntities();
if (scored.size() <= 0) return null;
return scored.get(0);
}
protected List<Target> getAllTargetEntities() {
List<Target> scored = new ArrayList<Target>();
World world = getWorld();
if (world == null) return scored;
List<Entity> entities = world.getEntities();
for (Entity entity : entities)
{
if (entity == getPlayer()) continue;
if (!targetNPCs && entity.hasMetadata("NPC")) continue;
// Special check for Elementals
if (!controller.isElemental(entity) && targetEntityType != null && !targetEntityType.isAssignableFrom(entity.getClass())) continue;
if (entity instanceof Player) {
Mage targetMage = controller.getMage((Player)entity);
if (targetMage.isSuperProtected()) continue;
}
Target newScore = new Target(getLocation(), entity, getMaxRange());
if (newScore.getScore() > 0)
{
scored.add(newScore);
}
}
Collections.sort(scored);
return scored;
}
protected int getMaxRange()
{
if (allowMaxRange) return Math.min(MAX_RANGE, range);
return Math.min(MAX_RANGE, (int)(mage.getRangeMultiplier() * range));
}
protected int getMaxRangeSquared()
{
int maxRange = getMaxRange();
return maxRange * maxRange;
}
protected void setMaxRange(int range, boolean allow)
{
this.range = range;
this.allowMaxRange = allow;
}
protected void setMaxRange(int range)
{
this.range = range;
}
protected boolean isTransparent(Material material)
{
return targetThroughMaterials.contains(material);
}
protected void applyPotionEffects(Location location, int radius, Collection<PotionEffect> potionEffects) {
if (potionEffects == null || radius <= 0 || potionEffects.size() == 0) return;
int radiusSquared = radius * 2;
List<Entity> entities = location.getWorld().getEntities();
for (Entity entity : entities) {
if (entity instanceof LivingEntity) {
Mage targetMage = null;
if (entity instanceof Player) {
Player targetPlayer = (Player)entity;
boolean isSourcePlayer = targetPlayer.getName().equals(mage.getName());
if (isSourcePlayer && getTargetType() != TargetType.ANY && getTargetType() != TargetType.SELF) {
continue;
}
targetMage = controller.getMage(targetPlayer);
// Check for protected players
if (targetMage.isSuperProtected() && !isSourcePlayer) {
continue;
}
}
if (targetEntityType != null && !(targetEntityType.isAssignableFrom(entity.getClass()))) continue;
if (entity.getLocation().distanceSquared(location) < radiusSquared) {
LivingEntity living = (LivingEntity)entity;
living.addPotionEffects(potionEffects);
if (targetMage != null) {
String playerMessage = getMessage("cast_player_message");
if (playerMessage.length() > 0) {
playerMessage = playerMessage.replace("$spell", getName());
targetMage.sendMessage(playerMessage);
}
}
}
}
}
}
protected void findTargetBlock()
{
Location location = getLocation();
if (location == null) {
return;
}
if (targetingComplete)
{
return;
}
if (!initializeBlockIterator(location)) {
return;
}
currentBlock = null;
previousBlock = null;
previousPreviousBlock = null;
Block block = getNextBlock();
while (block != null)
{
if (targetSpaceRequired) {
if (isOkToStandIn(block.getType()) && isOkToStandIn(block.getRelative(BlockFace.UP).getType())) {
break;
}
} else {
if (isTargetable(block.getType())) {
break;
}
}
block = getNextBlock();
}
if (block == null && allowMaxRange) {
currentBlock = previousBlock;
previousBlock = previousPreviousBlock;
}
targetingComplete = true;
}
public Block getInteractBlock() {
Location location = getEyeLocation();
if (location == null) return null;
Block playerBlock = location.getBlock();
if (isTargetable(playerBlock.getType())) return playerBlock;
Vector direction = location.getDirection().normalize();
return location.add(direction).getBlock();
}
public void coverSurface(Location center, int radius, BlockAction action)
{
int y = center.getBlockY();
for (int dx = -radius; dx < radius; ++dx)
{
for (int dz = -radius; dz < radius; ++dz)
{
if (isInCircle(dx, dz, radius))
{
int x = center.getBlockX() + dx;
int z = center.getBlockZ() + dz;
Block block = getWorld().getBlockAt(x, y, z);
int depth = 0;
if (targetThroughMaterials.contains(block.getType()))
{
while (depth < verticalSearchDistance && targetThroughMaterials.contains(block.getType()))
{
depth++;
block = block.getRelative(BlockFace.DOWN);
}
}
else
{
while (depth < verticalSearchDistance && !targetThroughMaterials.contains(block.getType()))
{
depth++;
block = block.getRelative(BlockFace.UP);
}
block = block.getRelative(BlockFace.DOWN);
}
Block coveringBlock = block.getRelative(BlockFace.UP);
if (!targetThroughMaterials.contains(block.getType()) && targetThroughMaterials.contains(coveringBlock.getType()))
{
action.perform(block);
}
}
}
}
}
@Override
protected void reset()
{
super.reset();
this.target = null;
this.targetName = null;
this.targetLocation = null;
}
@SuppressWarnings("unchecked")
@Override
protected void processParameters(ConfigurationSection parameters) {
super.processParameters(parameters);
range = parameters.getInt("range", range);
allowMaxRange = parameters.getBoolean("allow_max_range", allowMaxRange);
bypassPvpRestriction = parameters.getBoolean("bypass_pvp", false);
bypassPvpRestriction = parameters.getBoolean("bp", bypassPvpRestriction);
bypassBuildRestriction = parameters.getBoolean("bypass_build", false);
bypassBuildRestriction = parameters.getBoolean("bb", bypassBuildRestriction);
if (parameters.contains("transparent")) {
targetThroughMaterials.clear();
targetThroughMaterials.addAll(controller.getMaterialSet(parameters.getString("transparent")));
} else {
targetThroughMaterials.clear();
targetThroughMaterials.addAll(controller.getMaterialSet("transparent"));
}
if (parameters.contains("target")) {
String targetTypeName = parameters.getString("target");
try {
targetType = TargetType.valueOf(targetTypeName.toUpperCase());
} catch (Exception ex) {
controller.getLogger().warning("Invalid target_type: " + targetTypeName);
targetType = TargetType.OTHER;
}
} else {
targetType = TargetType.OTHER;
}
targetNPCs = parameters.getBoolean("target_npc", false);
if (parameters.contains("target_type")) {
String entityTypeName = parameters.getString("target_type");
try {
Class<?> typeClass = Class.forName("org.bukkit.entity." + entityTypeName);
if (Entity.class.isAssignableFrom(typeClass)) {
targetEntityType = (Class<? extends Entity>)typeClass;
} else {
controller.getLogger().warning("Entity type: " + entityTypeName + " not assignable to Entity");
}
} catch (Throwable ex) {
controller.getLogger().warning("Unknown entity type: " + entityTypeName);
targetEntityType = null;
}
}
Location defaultLocation = getLocation();
targetLocation = ConfigurationUtils.overrideLocation(parameters, "t", defaultLocation, controller.canCreateWorlds());
targetLocationOffset = null;
Double otxValue = ConfigurationUtils.getDouble(parameters, "otx", null);
Double otyValue = ConfigurationUtils.getDouble(parameters, "oty", null);
Double otzValue = ConfigurationUtils.getDouble(parameters, "otz", null);
if (otxValue != null || otzValue != null || otyValue != null) {
targetLocationOffset = new Vector(
(otxValue == null ? 0 : otxValue),
(otyValue == null ? 0 : otyValue),
(otzValue == null ? 0 : otzValue));
}
targetLocationWorldName = parameters.getString("otworld");
// For two-click construction spells
defaultLocation = targetLocation == null ? defaultLocation : targetLocation;
targetLocation2 = ConfigurationUtils.overrideLocation(parameters, "t2", defaultLocation, controller.canCreateWorlds());
if (parameters.contains("player")) {
Player player = controller.getPlugin().getServer().getPlayer(parameters.getString("player"));
if (player != null) {
targetLocation = player.getLocation();
targetEntity = player;
}
} else {
targetEntity = null;
}
// Special hack that should work well in most casts.
if (isUnderwater()) {
targetThroughMaterials.add(Material.WATER);
targetThroughMaterials.add(Material.STATIONARY_WATER);
}
}
@Override
protected void loadTemplate(ConfigurationSection node)
{
super.loadTemplate(node);
pvpRestricted = node.getBoolean("pvp_restricted", pvpRestricted);
}
@SuppressWarnings("deprecation")
@Override
protected String getDisplayMaterialName()
{
if (target != null && target.isValid()) {
return MaterialBrush.getMaterialName(target.getBlock().getType(), target.getBlock().getData());
}
return super.getDisplayMaterialName();
}
@Override
protected boolean canCast() {
return !pvpRestricted || bypassPvpRestriction || controller.isPVPAllowed(mage.getLocation()) || mage.isSuperPowered();
}
@Override
protected void onBackfire() {
targetType = TargetType.SELF;
}
@Override
public Location getTargetLocation() {
if (target != null && target.isValid()) {
return target.getLocation();
}
return null;
}
@Override
public Entity getTargetEntity() {
if (target != null && target.isValid()) {
return target.getEntity();
}
return null;
}
@Override
public com.elmakers.mine.bukkit.api.block.MaterialAndData getEffectMaterial()
{
if (target != null && target.isValid()) {
Block block = target.getBlock();
MaterialAndData targetMaterial = new MaterialAndData(block);
if (targetMaterial.getMaterial() == Material.AIR) {
targetMaterial.setMaterial(DEFAULT_EFFECT_MATERIAL);
}
return targetMaterial;
}
return super.getEffectMaterial();
}
}
| src/main/java/com/elmakers/mine/bukkit/spell/TargetingSpell.java | package com.elmakers.mine.bukkit.spell;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.command.BlockCommandSender;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.potion.PotionEffect;
import org.bukkit.util.BlockIterator;
import org.bukkit.util.Vector;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.spell.TargetType;
import com.elmakers.mine.bukkit.block.MaterialAndData;
import com.elmakers.mine.bukkit.block.MaterialBrush;
import com.elmakers.mine.bukkit.block.batch.BlockAction;
import com.elmakers.mine.bukkit.utility.ConfigurationUtils;
import com.elmakers.mine.bukkit.utility.Target;
public abstract class TargetingSpell extends BaseSpell {
private static final int MAX_RANGE = 511;
private Target target = null;
private String targetName = null;
private TargetType targetType = TargetType.OTHER;
private boolean targetNPCs = false;
private int verticalSearchDistance = 8;
private boolean targetingComplete = false;
private boolean targetSpaceRequired = false;
private Class<? extends Entity> targetEntityType = null;
private Location targetLocation;
private Vector targetLocationOffset;
private String targetLocationWorldName;
protected Location targetLocation2;
private Entity targetEntity = null;
private boolean bypassBuildRestriction = false;
private boolean allowMaxRange = false;
private int range = 32;
private Set<Material> targetThroughMaterials = new HashSet<Material>();
private boolean reverseTargeting = false;
private BlockIterator blockIterator = null;
private Block currentBlock = null;
private Block previousBlock = null;
private Block previousPreviousBlock = null;
private boolean pvpRestricted = false;
private boolean bypassPvpRestriction = false;
@Override
protected void preCast()
{
super.preCast();
initializeTargeting();
}
protected void initializeTargeting()
{
blockIterator = null;
targetSpaceRequired = false;
reverseTargeting = false;
targetingComplete = false;
}
public void setTargetType(TargetType t) {
this.targetType = t;
if (target != null) {
target = null;
initializeTargeting();
}
}
protected String getTargetName(Entity target)
{
if (target instanceof Player)
{
return ((Player)target).getName();
}
if (controller.isElemental(target))
{
return "Elemental";
}
return target.getType().name().toLowerCase().replace('_', ' ');
}
public String getMessage(String messageKey, String def) {
String message = super.getMessage(messageKey, def);
// Escape targeting parameters
String useTargetName = targetName;
if (useTargetName == null) {
if (target != null && target.hasEntity()) {
useTargetName = getTargetName(target.getEntity());
}
else {
useTargetName = "Unknown";
}
}
message = message.replace("$target", useTargetName);
return message;
}
protected void setTargetName(String name) {
targetName = name;
}
public void targetThrough(Material mat)
{
targetThroughMaterials.add(mat);
}
public void targetThrough(Set<Material> mat)
{
targetThroughMaterials.clear();
targetThroughMaterials.addAll(mat);
}
public void noTargetThrough(Material mat)
{
targetThroughMaterials.remove(mat);
}
public boolean isTargetable(Material mat)
{
if (!allowPassThrough(mat)) {
return true;
}
boolean targetThrough = targetThroughMaterials.contains(mat);
if (reverseTargeting)
{
return(targetThrough);
}
return !targetThrough;
}
public void setReverseTargeting(boolean reverse)
{
reverseTargeting = reverse;
}
public boolean isReverseTargeting()
{
return reverseTargeting;
}
public void setTargetSpaceRequired()
{
targetSpaceRequired = true;
}
public void setTarget(Location location) {
target = new Target(getLocation(), location.getBlock());
}
public boolean hasBuildPermission(Block block)
{
if (bypassBuildRestriction) return true;
return mage.hasBuildPermission(block);
}
protected void offsetTarget(int dx, int dy, int dz) {
Location location = getLocation();
if (location == null) {
return;
}
location.add(dx, dy, dz);
initializeBlockIterator(location);
}
protected boolean initializeBlockIterator(Location location) {
if (location.getBlockY() < 0) {
location = location.clone();
location.setY(0);
}
if (location.getBlockY() > controller.getMaxY()) {
location = location.clone();
location.setY(controller.getMaxY());
}
try {
blockIterator = new BlockIterator(location, VIEW_HEIGHT, getMaxRange());
} catch (Exception ex) {
// This seems to happen randomly, like when you use the same target.
// Very annoying, and I now kind of regret switching to BlockIterator.
// At any rate, we're going to just re-use the last target block and
// cross our fingers!
return false;
}
return true;
}
/**
* Move "steps" forward along line of vision and returns the block there
*
* @return The block at the new location
*/
protected Block getNextBlock()
{
previousPreviousBlock = previousBlock;
previousBlock = currentBlock;
if (blockIterator == null || !blockIterator.hasNext()) {
currentBlock = null;
} else {
currentBlock = blockIterator.next();
}
return currentBlock;
}
/**
* Returns the current block along the line of vision
*
* @return The block
*/
public Block getCurBlock()
{
return currentBlock;
}
/**
* Returns the previous block along the line of vision
*
* @return The block
*/
public Block getPreviousBlock()
{
return previousBlock;
}
public TargetType getTargetType()
{
return targetType;
}
protected Target getTarget()
{
target = findTarget();
if (targetLocationOffset != null) {
target.add(targetLocationOffset);
}
if (targetLocationWorldName != null && targetLocationWorldName.length() > 0) {
Location location = target.getLocation();
if (location != null) {
World targetWorld = location.getWorld();
target.setWorld(ConfigurationUtils.overrideWorld(targetLocationWorldName, targetWorld, controller.canCreateWorlds()));
}
}
return target;
}
    /**
     * Computes the current spell target, which may be an entity, a block or a
     * plain location depending on the target type.
     *
     * @return The target; never null, though it may be an empty (invalid) Target
     */
public Target findTarget()
{
if (targetType != TargetType.NONE && targetType != TargetType.BLOCK && targetEntity != null) {
return new Target(getLocation(), targetEntity);
}
Player player = getPlayer();
if (targetType == TargetType.SELF && player != null) {
return new Target(getLocation(), player);
}
CommandSender sender = mage.getCommandSender();
if (targetType == TargetType.SELF && player == null && sender != null && (sender instanceof BlockCommandSender)) {
BlockCommandSender commandBlock = (BlockCommandSender)mage.getCommandSender();
return new Target(commandBlock.getBlock().getLocation(), commandBlock.getBlock());
}
Location location = getLocation();
if (targetType == TargetType.SELF && location != null) {
return new Target(location, location.getBlock());
}
if (targetType == TargetType.SELF) {
return new Target(location);
}
if (targetType != TargetType.NONE && targetLocation != null) {
return new Target(getLocation(), targetLocation.getBlock());
}
if (targetType == TargetType.NONE) {
return new Target(getLocation());
}
findTargetBlock();
Block block = getCurBlock();
if (targetType == TargetType.BLOCK) {
return new Target(getLocation(), block);
}
Target targetBlock = block == null ? null : new Target(getLocation(), block);
Target targetEntity = getEntityTarget();
// Don't allow targeting entities in no-PVP areas.
        boolean noPvp = targetEntity != null && (targetEntity.getEntity() instanceof Player) && pvpRestricted && !bypassPvpRestriction && !controller.isPVPAllowed(targetEntity.getLocation());
if (noPvp) {
targetEntity = null;
            // Don't let them target the block, either.
targetBlock = null;
}
if (targetEntity == null && targetType == TargetType.ANY && player != null) {
return new Target(getLocation(), player, targetBlock == null ? null : targetBlock.getBlock());
}
if (targetBlock != null && targetEntity != null) {
if (targetBlock.getDistance() < targetEntity.getDistance()) {
targetEntity = null;
} else {
targetBlock = null;
}
}
if (targetEntity != null) {
return targetEntity;
} else if (targetBlock != null) {
return targetBlock;
}
return new Target(getLocation());
}
public Target getCurrentTarget()
{
if (target == null) {
target = new Target(getLocation());
}
return target;
}
public void clearTarget()
{
target = null;
}
public Block getTargetBlock()
{
return getTarget().getBlock();
}
protected Target getEntityTarget()
{
if (targetEntityType == null) return null;
List<Target> scored = getAllTargetEntities();
        if (scored.isEmpty()) return null;
return scored.get(0);
}
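    /**
     * Collects every entity in the caster's world that is a valid target for
     * this spell, scores each one by how well it lies along the line of sight,
     * and returns the list sorted with the best match first.
     */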
protected List<Target> getAllTargetEntities() {
List<Target> scored = new ArrayList<Target>();
World world = getWorld();
if (world == null) return scored;
List<Entity> entities = world.getEntities();
for (Entity entity : entities)
{
if (entity == getPlayer()) continue;
if (!targetNPCs && entity.hasMetadata("NPC")) continue;
// Special check for Elementals
if (!controller.isElemental(entity) && targetEntityType != null && !targetEntityType.isAssignableFrom(entity.getClass())) continue;
if (entity instanceof Player) {
Mage targetMage = controller.getMage((Player)entity);
if (targetMage.isSuperProtected()) continue;
}
Target newScore = new Target(getLocation(), entity, getMaxRange());
if (newScore.getScore() > 0)
{
scored.add(newScore);
}
}
Collections.sort(scored);
return scored;
}
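    /**
     * Returns the effective targeting range, capped at MAX_RANGE. The mage's
     * range multiplier is only applied when allow_max_range is not set.
     */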
protected int getMaxRange()
{
if (allowMaxRange) return Math.min(MAX_RANGE, range);
return Math.min(MAX_RANGE, (int)(mage.getRangeMultiplier() * range));
}
protected int getMaxRangeSquared()
{
int maxRange = getMaxRange();
return maxRange * maxRange;
}
protected void setMaxRange(int range, boolean allow)
{
this.range = range;
this.allowMaxRange = allow;
}
protected void setMaxRange(int range)
{
this.range = range;
}
protected boolean isTransparent(Material material)
{
return targetThroughMaterials.contains(material);
}
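    /**
     * Applies the given potion effects to every living entity within the radius
     * of the target location, skipping protected players and, unless this spell
     * targets ANY or SELF, the caster themselves.
     */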
protected void applyPotionEffects(Location location, int radius, Collection<PotionEffect> potionEffects) {
        if (potionEffects == null || radius <= 0 || potionEffects.isEmpty()) return;
        // Compare against the squared radius, since distanceSquared is used below
        int radiusSquared = radius * radius;
List<Entity> entities = location.getWorld().getEntities();
for (Entity entity : entities) {
if (entity instanceof LivingEntity) {
if (entity instanceof Player) {
Player targetPlayer = (Player)entity;
boolean isSourcePlayer = targetPlayer.getName().equals(mage.getName());
if (isSourcePlayer && getTargetType() != TargetType.ANY && getTargetType() != TargetType.SELF) {
continue;
}
Mage targetMage = controller.getMage(targetPlayer);
// Check for protected players
if (targetMage.isSuperProtected() && !isSourcePlayer) {
continue;
}
String playerMessage = getMessage("cast_player_message");
if (playerMessage.length() > 0) {
playerMessage = playerMessage.replace("$spell", getName());
targetMage.sendMessage(playerMessage);
}
}
if (targetEntityType != null && !(targetEntityType.isAssignableFrom(entity.getClass()))) continue;
if (entity.getLocation().distanceSquared(location) < radiusSquared) {
LivingEntity living = (LivingEntity)entity;
living.addPotionEffects(potionEffects);
}
}
}
}
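    /**
     * Walks the block iterator along the line of sight until a targetable block
     * is found, or until a standable space is found when target space is
     * required. Subsequent calls are no-ops once targeting is complete.
     */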
protected void findTargetBlock()
{
Location location = getLocation();
if (location == null) {
return;
}
if (targetingComplete)
{
return;
}
if (!initializeBlockIterator(location)) {
return;
}
currentBlock = null;
previousBlock = null;
previousPreviousBlock = null;
Block block = getNextBlock();
while (block != null)
{
if (targetSpaceRequired) {
if (isOkToStandIn(block.getType()) && isOkToStandIn(block.getRelative(BlockFace.UP).getType())) {
break;
}
} else {
if (isTargetable(block.getType())) {
break;
}
}
block = getNextBlock();
}
if (block == null && allowMaxRange) {
currentBlock = previousBlock;
previousBlock = previousPreviousBlock;
}
targetingComplete = true;
}
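    /**
     * Returns the block the caster is directly interacting with: the block at
     * eye level if it is targetable, otherwise the next block along the view
     * direction.
     */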
public Block getInteractBlock() {
Location location = getEyeLocation();
if (location == null) return null;
Block playerBlock = location.getBlock();
if (isTargetable(playerBlock.getType())) return playerBlock;
Vector direction = location.getDirection().normalize();
return location.add(direction).getBlock();
}
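    /**
     * Finds the exposed surface block in each column within a circle of the
     * given radius around the center, searching up or down by at most
     * verticalSearchDistance blocks, and performs the given action on it.
     */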
public void coverSurface(Location center, int radius, BlockAction action)
{
int y = center.getBlockY();
for (int dx = -radius; dx < radius; ++dx)
{
for (int dz = -radius; dz < radius; ++dz)
{
if (isInCircle(dx, dz, radius))
{
int x = center.getBlockX() + dx;
int z = center.getBlockZ() + dz;
Block block = getWorld().getBlockAt(x, y, z);
int depth = 0;
if (targetThroughMaterials.contains(block.getType()))
{
while (depth < verticalSearchDistance && targetThroughMaterials.contains(block.getType()))
{
depth++;
block = block.getRelative(BlockFace.DOWN);
}
}
else
{
while (depth < verticalSearchDistance && !targetThroughMaterials.contains(block.getType()))
{
depth++;
block = block.getRelative(BlockFace.UP);
}
block = block.getRelative(BlockFace.DOWN);
}
Block coveringBlock = block.getRelative(BlockFace.UP);
if (!targetThroughMaterials.contains(block.getType()) && targetThroughMaterials.contains(coveringBlock.getType()))
{
action.perform(block);
}
}
}
}
}
@Override
protected void reset()
{
super.reset();
this.target = null;
this.targetName = null;
this.targetLocation = null;
}
@SuppressWarnings("unchecked")
@Override
protected void processParameters(ConfigurationSection parameters) {
super.processParameters(parameters);
range = parameters.getInt("range", range);
allowMaxRange = parameters.getBoolean("allow_max_range", allowMaxRange);
bypassPvpRestriction = parameters.getBoolean("bypass_pvp", false);
bypassPvpRestriction = parameters.getBoolean("bp", bypassPvpRestriction);
bypassBuildRestriction = parameters.getBoolean("bypass_build", false);
bypassBuildRestriction = parameters.getBoolean("bb", bypassBuildRestriction);
if (parameters.contains("transparent")) {
targetThroughMaterials.clear();
targetThroughMaterials.addAll(controller.getMaterialSet(parameters.getString("transparent")));
} else {
targetThroughMaterials.clear();
targetThroughMaterials.addAll(controller.getMaterialSet("transparent"));
}
if (parameters.contains("target")) {
String targetTypeName = parameters.getString("target");
try {
targetType = TargetType.valueOf(targetTypeName.toUpperCase());
} catch (Exception ex) {
controller.getLogger().warning("Invalid target_type: " + targetTypeName);
targetType = TargetType.OTHER;
}
} else {
targetType = TargetType.OTHER;
}
targetNPCs = parameters.getBoolean("target_npc", false);
if (parameters.contains("target_type")) {
String entityTypeName = parameters.getString("target_type");
try {
Class<?> typeClass = Class.forName("org.bukkit.entity." + entityTypeName);
if (Entity.class.isAssignableFrom(typeClass)) {
targetEntityType = (Class<? extends Entity>)typeClass;
} else {
controller.getLogger().warning("Entity type: " + entityTypeName + " not assignable to Entity");
}
} catch (Throwable ex) {
controller.getLogger().warning("Unknown entity type: " + entityTypeName);
targetEntityType = null;
}
}
Location defaultLocation = getLocation();
targetLocation = ConfigurationUtils.overrideLocation(parameters, "t", defaultLocation, controller.canCreateWorlds());
targetLocationOffset = null;
Double otxValue = ConfigurationUtils.getDouble(parameters, "otx", null);
Double otyValue = ConfigurationUtils.getDouble(parameters, "oty", null);
Double otzValue = ConfigurationUtils.getDouble(parameters, "otz", null);
if (otxValue != null || otzValue != null || otyValue != null) {
targetLocationOffset = new Vector(
(otxValue == null ? 0 : otxValue),
(otyValue == null ? 0 : otyValue),
(otzValue == null ? 0 : otzValue));
}
targetLocationWorldName = parameters.getString("otworld");
// For two-click construction spells
defaultLocation = targetLocation == null ? defaultLocation : targetLocation;
targetLocation2 = ConfigurationUtils.overrideLocation(parameters, "t2", defaultLocation, controller.canCreateWorlds());
if (parameters.contains("player")) {
Player player = controller.getPlugin().getServer().getPlayer(parameters.getString("player"));
if (player != null) {
targetLocation = player.getLocation();
targetEntity = player;
}
} else {
targetEntity = null;
}
        // Special case: if the caster is underwater, allow targeting through water.
if (isUnderwater()) {
targetThroughMaterials.add(Material.WATER);
targetThroughMaterials.add(Material.STATIONARY_WATER);
}
}
@Override
protected void loadTemplate(ConfigurationSection node)
{
super.loadTemplate(node);
pvpRestricted = node.getBoolean("pvp_restricted", pvpRestricted);
}
@SuppressWarnings("deprecation")
@Override
protected String getDisplayMaterialName()
{
if (target != null && target.isValid()) {
return MaterialBrush.getMaterialName(target.getBlock().getType(), target.getBlock().getData());
}
return super.getDisplayMaterialName();
}
@Override
protected boolean canCast() {
return !pvpRestricted || bypassPvpRestriction || controller.isPVPAllowed(mage.getLocation()) || mage.isSuperPowered();
}
@Override
protected void onBackfire() {
targetType = TargetType.SELF;
}
@Override
public Location getTargetLocation() {
if (target != null && target.isValid()) {
return target.getLocation();
}
return null;
}
@Override
public Entity getTargetEntity() {
if (target != null && target.isValid()) {
return target.getEntity();
}
return null;
}
@Override
public com.elmakers.mine.bukkit.api.block.MaterialAndData getEffectMaterial()
{
if (target != null && target.isValid()) {
Block block = target.getBlock();
MaterialAndData targetMaterial = new MaterialAndData(block);
if (targetMaterial.getMaterial() == Material.AIR) {
targetMaterial.setMaterial(DEFAULT_EFFECT_MATERIAL);
}
return targetMaterial;
}
return super.getEffectMaterial();
}
}
| Fix spammy potion effect messages
| src/main/java/com/elmakers/mine/bukkit/spell/TargetingSpell.java | Fix spammy potion effect messages | <ide><path>rc/main/java/com/elmakers/mine/bukkit/spell/TargetingSpell.java
<ide> List<Entity> entities = location.getWorld().getEntities();
<ide> for (Entity entity : entities) {
<ide> if (entity instanceof LivingEntity) {
<add> Mage targetMage = null;
<ide> if (entity instanceof Player) {
<ide> Player targetPlayer = (Player)entity;
<ide> boolean isSourcePlayer = targetPlayer.getName().equals(mage.getName());
<ide> continue;
<ide> }
<ide>
<del> Mage targetMage = controller.getMage(targetPlayer);
<del>
<add> targetMage = controller.getMage(targetPlayer);
<ide> // Check for protected players
<ide> if (targetMage.isSuperProtected() && !isSourcePlayer) {
<ide> continue;
<del> }
<del>
<del> String playerMessage = getMessage("cast_player_message");
<del> if (playerMessage.length() > 0) {
<del> playerMessage = playerMessage.replace("$spell", getName());
<del> targetMage.sendMessage(playerMessage);
<ide> }
<ide> }
<ide>
<ide> if (entity.getLocation().distanceSquared(location) < radiusSquared) {
<ide> LivingEntity living = (LivingEntity)entity;
<ide> living.addPotionEffects(potionEffects);
<add>
<add> if (targetMage != null) {
<add> String playerMessage = getMessage("cast_player_message");
<add> if (playerMessage.length() > 0) {
<add> playerMessage = playerMessage.replace("$spell", getName());
<add> targetMage.sendMessage(playerMessage);
<add> }
<add> }
<ide> }
<ide> }
<ide> } |