lang stringclasses 2 values | license stringclasses 13 values | stderr stringlengths 0-343 | commit stringlengths 40-40 | returncode int64 0-128 | repos stringlengths 6-87.7k | new_contents stringlengths 0-6.23M | new_file stringlengths 3-311 | old_contents stringlengths 0-6.23M | message stringlengths 6-9.1k | old_file stringlengths 3-311 | subject stringlengths 0-4k | git_diff stringlengths 0-6.31M |
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | agpl-3.0 | 454d30ba8202884abc6a268b621e215a001e5c06 | 0 | duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot,duncte123/SkyBot | /*
* Skybot, a multipurpose discord bot
* Copyright (C) 2017 - 2018 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan & Maurice R S "Sanduhr32"
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.duncte123.skybot.utils;
import kotlin.Deprecated;
import ml.duncte123.skybot.unstable.utils.ComparatingUtils;
import net.dv8tion.jda.core.MessageBuilder;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.ChannelType;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.MessageEmbed;
import net.dv8tion.jda.core.entities.TextChannel;
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent;
import net.dv8tion.jda.core.exceptions.ErrorResponseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
@SuppressWarnings({"unused", "WeakerAccess"})
public class MessageUtils {
private static Logger logger = LoggerFactory.getLogger(MessageUtils.class);
private static final Consumer<Throwable> CUSTOM_QUEUE_ERROR = ex -> {
if (ex instanceof ErrorResponseException) {
ComparatingUtils.execCheck(ex);
if (((ErrorResponseException) ex).getErrorCode() != 10008) {
logger.error("RestAction queue returned failure", ex);
ex.printStackTrace();
}
}
};
/**
* This will react with a ❌ if the user doesn't have permission to run the command
*
* @param message the message to add the reaction to
*/
public static void sendError(Message message) {
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_ADD_REACTION)) {
return;
}
}
message.addReaction("❌").queue(null, CUSTOM_QUEUE_ERROR);
}
/**
* This method uses the sendError and sendMsg methods
*
* @param message the {@link Message} for the sendError method
* @param text the {@link String} for the sendMsg method
*/
public static void sendErrorWithMessage(Message message, String text) {
sendError(message);
sendMsg(message.getTextChannel(), text);
}
/**
* This will react with a ❌ if the user doesn't have permission to run the command or if any other error occurred during execution
*
* @param message the message to add the reaction to
* @param error the cause
*/
public static void sendErrorJSON(Message message, Throwable error, final boolean print) {
if (print)
logger.error(error.getLocalizedMessage(), error);
//Makes no difference if we use sendError or check here both perm types
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_READ,
Permission.MESSAGE_WRITE, Permission.MESSAGE_ATTACH_FILES, Permission.MESSAGE_ADD_REACTION)) {
return;
}
}
message.addReaction("❌").queue(null, CUSTOM_QUEUE_ERROR);
message.getChannel().sendFile(EarthUtils.throwableToJSONObject(error).toString(4).getBytes(), "error.json",
new MessageBuilder().setEmbed(EmbedUtils.defaultEmbed().setTitle("We got an error!").setDescription(String.format("Error type: %s",
error.getClass().getSimpleName())).build()).build()
).queue();
}
/**
* This will react with a ✅ to indicate that the command was executed successfully
*
* @param message the message to add the reaction to
*/
public static void sendSuccess(Message message) {
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_ADD_REACTION)) {
message.addReaction("✅").queue(null, ignored -> {});
}
}
}
/**
* This method uses the sendSuccess and sendMsg methods
*
* @param message the {@link Message} for the sendSuccess method
* @param text the {@link String} for the sendMsg method
*/
public static void sendSuccessWithMessage(Message message, String text) {
sendSuccess(message);
sendMsg(message.getTextChannel(), text);
}
/**
* This will check if we can send an embed and convert it to a message if we can't send embeds
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param embed The embed to send
*/
public static void sendEmbed(GuildMessageReceivedEvent event, MessageEmbed embed) {
sendEmbed(event.getChannel(), embed);
}
/**
* This will check if we can send an embed and convert it to a message if we can't send embeds
*
* @param channel the {@link TextChannel TextChannel} that we want to send the embed to
* @param embed The embed to send
*/
public static void sendEmbed(TextChannel channel, MessageEmbed embed) {
if (channel != null) {
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_EMBED_LINKS)) {
(new MessageBuilder()).append(EmbedUtils.embedToMessage(embed))
.buildAll(MessageBuilder.SplitPolicy.NEWLINE)
.forEach(it -> sendMsg(channel, it));
// sendMsg(channel, EmbedUtils.embedToMessage(embed));
return;
}
//noinspection deprecation
sendMsg(channel, embed);
}
}
public static void editMsg(Message message, Message newContent) {
if (message == null || newContent == null) return;
if (newContent.getEmbeds().size() > 0) {
if (!message.getGuild().getSelfMember().hasPermission(message.getTextChannel(),
Permission.MESSAGE_EMBED_LINKS)) {
MessageBuilder mb = new MessageBuilder()
.append(newContent.getContentRaw())
.append('\n');
newContent.getEmbeds().forEach(
messageEmbed -> mb.append(EmbedUtils.embedToMessage(messageEmbed))
);
message.editMessage(mb.build()).queue();
return;
}
message.editMessage(newContent).queue();
}
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
*/
public static void sendMsgAndDeleteAfter(GuildMessageReceivedEvent event, long delay, TimeUnit unit, String msg) {
sendMsgFormatAndDeleteAfter(event.getChannel(), delay, unit, msg, "");
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param tc an instance of {@link TextChannel TextChannel}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
*/
public static void sendMsgAndDeleteAfter(TextChannel tc, long delay, TimeUnit unit, String msg) {
sendMsgFormatAndDeleteAfter(tc, delay, unit, msg, "");
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormatAndDeleteAfter(GuildMessageReceivedEvent event, long delay, TimeUnit unit, String msg, Object... args) {
sendMsgFormatAndDeleteAfter(event.getChannel(), delay, unit, msg, args);
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormatAndDeleteAfter(TextChannel channel, long delay, TimeUnit unit, String msg, Object... args) {
sendMsg(channel, new MessageBuilder().append(String.format(msg, args)).build(),
it -> it.delete().reason("automatic remove").queueAfter(delay, unit, null, CUSTOM_QUEUE_ERROR)
);
}
/**
* This is a shortcut for sending formatted messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormat(GuildMessageReceivedEvent event, String msg, Object... args) {
sendMsg(event.getChannel(), (new MessageBuilder().append(String.format(msg, args)).build()), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending formatted messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormat(TextChannel channel, String msg, Object... args) {
sendMsg(channel, (new MessageBuilder().append(String.format(msg, args)).build()), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg, Consumer<Message> success) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
*/
public static void sendMsg(TextChannel channel, String msg) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(TextChannel channel, String msg, Consumer<Message> success) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(TextChannel channel, String msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @deprecated Use {@link #sendEmbed(GuildMessageReceivedEvent, MessageEmbed)}
*/
@Deprecated(message = "use #sendEmbed")
@java.lang.Deprecated
public static void sendMsg(GuildMessageReceivedEvent event, MessageEmbed msg) {
sendMsg(event.getChannel(), (new MessageBuilder()).setEmbed(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @deprecated Use {@link #sendEmbed(TextChannel, MessageEmbed)}
*/
@Deprecated(message = "use #sendEmbed")
@java.lang.Deprecated
public static void sendMsg(TextChannel channel, MessageEmbed msg) {
sendMsg(channel, (new MessageBuilder()).setEmbed(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg) {
sendMsg(event.getChannel(), msg, null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg, Consumer<Message> success) {
sendMsg(event.getChannel(), msg, success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(event.getChannel(), msg, success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
*/
public static void sendMsg(TextChannel channel, Message msg) {
sendMsg(channel, msg, null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(TextChannel channel, Message msg, Consumer<Message> success) {
sendMsg(channel, msg, success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(TextChannel channel, Message msg, Consumer<Message> success, Consumer<Throwable> failure) {
//Check if the channel exists
if (channel != null && channel.getGuild().getTextChannelById(channel.getId()) != null) {
//Only send a message if we can talk
if(channel.getGuild().getSelfMember().hasPermission(channel,Permission.MESSAGE_WRITE, Permission.MESSAGE_READ))
channel.sendMessage(msg).queue(success, failure);
}
}
}
| src/main/java/ml/duncte123/skybot/utils/MessageUtils.java | /*
* Skybot, a multipurpose discord bot
* Copyright (C) 2017 - 2018 Duncan "duncte123" Sterken & Ramid "ramidzkh" Khan & Maurice R S "Sanduhr32"
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.duncte123.skybot.utils;
import kotlin.Deprecated;
import ml.duncte123.skybot.unstable.utils.ComparatingUtils;
import net.dv8tion.jda.core.MessageBuilder;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.ChannelType;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.MessageEmbed;
import net.dv8tion.jda.core.entities.TextChannel;
import net.dv8tion.jda.core.events.message.guild.GuildMessageReceivedEvent;
import net.dv8tion.jda.core.exceptions.ErrorResponseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
@SuppressWarnings({"unused", "WeakerAccess"})
public class MessageUtils {
private static Logger logger = LoggerFactory.getLogger(MessageUtils.class);
private static final Consumer<Throwable> CUSTOM_QUEUE_ERROR = ex -> {
if (ex instanceof ErrorResponseException) {
ComparatingUtils.execCheck(ex);
if (((ErrorResponseException) ex).getErrorCode() != 10008) {
logger.error("RestAction queue returned failure", ex);
ex.printStackTrace();
}
}
};
/**
* This will react with a ❌ if the user doesn't have permission to run the command
*
* @param message the message to add the reaction to
*/
public static void sendError(Message message) {
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_ADD_REACTION)) {
return;
}
}
message.addReaction("❌").queue(null, CUSTOM_QUEUE_ERROR);
}
/**
* This method uses the sendError and sendMsg methods
*
* @param message the {@link Message} for the sendError method
* @param text the {@link String} for the sendMsg method
*/
public static void sendErrorWithMessage(Message message, String text) {
sendError(message);
sendMsg(message.getTextChannel(), text);
}
/**
* This will react with a ❌ if the user doesn't have permission to run the command or if any other error occurred during execution
*
* @param message the message to add the reaction to
* @param error the cause
*/
public static void sendErrorJSON(Message message, Throwable error, final boolean print) {
if (print)
logger.error(error.getLocalizedMessage(), error);
//Makes no difference if we use sendError or check here both perm types
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_READ,
Permission.MESSAGE_WRITE, Permission.MESSAGE_ATTACH_FILES, Permission.MESSAGE_ADD_REACTION)) {
return;
}
}
message.addReaction("❌").queue(null, CUSTOM_QUEUE_ERROR);
message.getChannel().sendFile(EarthUtils.throwableToJSONObject(error).toString(4).getBytes(), "error.json",
new MessageBuilder().setEmbed(EmbedUtils.defaultEmbed().setTitle("We got an error!").setDescription(String.format("Error type: %s",
error.getClass().getSimpleName())).build()).build()
).queue();
}
/**
* This will react with a ✅ to indicate that the command was executed successfully
*
* @param message the message to add the reaction to
*/
public static void sendSuccess(Message message) {
if (message.getChannelType() == ChannelType.TEXT) {
TextChannel channel = message.getTextChannel();
if (channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_ADD_REACTION)) {
message.addReaction("✅").queue(null, ignored -> {});
}
}
}
/**
* This method uses the sendSuccess and sendMsg methods
*
* @param message the {@link Message} for the sendSuccess method
* @param text the {@link String} for the sendMsg method
*/
public static void sendSuccessWithMessage(Message message, String text) {
sendSuccess(message);
sendMsg(message.getTextChannel(), text);
}
/**
* This will check if we can send an embed and convert it to a message if we can't send embeds
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param embed The embed to send
*/
public static void sendEmbed(GuildMessageReceivedEvent event, MessageEmbed embed) {
sendEmbed(event.getChannel(), embed);
}
/**
* This will check if we can send an embed and convert it to a message if we can't send embeds
*
* @param channel the {@link TextChannel TextChannel} that we want to send the embed to
* @param embed The embed to send
*/
public static void sendEmbed(TextChannel channel, MessageEmbed embed) {
if (channel != null) {
if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_EMBED_LINKS)) {
sendMsg(channel, EmbedUtils.embedToMessage(embed));
return;
}
//noinspection deprecation
sendMsg(channel, embed);
}
}
public static void editMsg(Message message, Message newContent) {
if (message == null || newContent == null) return;
if (newContent.getEmbeds().size() > 0) {
if (!message.getGuild().getSelfMember().hasPermission(message.getTextChannel(),
Permission.MESSAGE_EMBED_LINKS)) {
MessageBuilder mb = new MessageBuilder()
.append(newContent.getContentRaw())
.append('\n');
newContent.getEmbeds().forEach(
messageEmbed -> mb.append(EmbedUtils.embedToMessage(messageEmbed))
);
message.editMessage(mb.build()).queue();
return;
}
message.editMessage(newContent).queue();
}
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
*/
public static void sendMsgAndDeleteAfter(GuildMessageReceivedEvent event, long delay, TimeUnit unit, String msg) {
sendMsgFormatAndDeleteAfter(event.getChannel(), delay, unit, msg, "");
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param tc an instance of {@link TextChannel TextChannel}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
*/
public static void sendMsgAndDeleteAfter(TextChannel tc, long delay, TimeUnit unit, String msg) {
sendMsgFormatAndDeleteAfter(tc, delay, unit, msg, "");
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormatAndDeleteAfter(GuildMessageReceivedEvent event, long delay, TimeUnit unit, String msg, Object... args) {
sendMsgFormatAndDeleteAfter(event.getChannel(), delay, unit, msg, args);
}
/**
* This is a shortcut for sending formatted messages to a channel which also deletes the message after the given delay
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param delay the {@link Long} that is our delay
* @param unit the {@link TimeUnit} that is our unit that uses the delay parameter
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormatAndDeleteAfter(TextChannel channel, long delay, TimeUnit unit, String msg, Object... args) {
sendMsg(channel, new MessageBuilder().append(String.format(msg, args)).build(),
it -> it.delete().reason("automatic remove").queueAfter(delay, unit, null, CUSTOM_QUEUE_ERROR)
);
}
/**
* This is a shortcut for sending formatted messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormat(GuildMessageReceivedEvent event, String msg, Object... args) {
sendMsg(event.getChannel(), (new MessageBuilder().append(String.format(msg, args)).build()), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending formatted messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message format to send
* @param args the arguments that should be used in the msg parameter
*/
public static void sendMsgFormat(TextChannel channel, String msg, Object... args) {
sendMsg(channel, (new MessageBuilder().append(String.format(msg, args)).build()), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg, Consumer<Message> success) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, String msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(event.getChannel(), (new MessageBuilder()).append(msg).build(), success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
*/
public static void sendMsg(TextChannel channel, String msg) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(TextChannel channel, String msg, Consumer<Message> success) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(TextChannel channel, String msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(channel, (new MessageBuilder()).append(msg).build(), success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @deprecated Use {@link #sendEmbed(GuildMessageReceivedEvent, MessageEmbed)}
*/
@Deprecated(message = "use #sendEmbed")
@java.lang.Deprecated
public static void sendMsg(GuildMessageReceivedEvent event, MessageEmbed msg) {
sendMsg(event.getChannel(), (new MessageBuilder()).setEmbed(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @deprecated Use {@link #sendEmbed(TextChannel, MessageEmbed)}
*/
@Deprecated(message = "use #sendEmbed")
@java.lang.Deprecated
public static void sendMsg(TextChannel channel, MessageEmbed msg) {
sendMsg(channel, (new MessageBuilder()).setEmbed(msg).build(), null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg) {
sendMsg(event.getChannel(), msg, null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg, Consumer<Message> success) {
sendMsg(event.getChannel(), msg, success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param event an instance of {@link GuildMessageReceivedEvent GuildMessageReceivedEvent}
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(GuildMessageReceivedEvent event, Message msg, Consumer<Message> success, Consumer<Throwable> failure) {
sendMsg(event.getChannel(), msg, success, failure);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
*/
public static void sendMsg(TextChannel channel, Message msg) {
sendMsg(channel, msg, null, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
*/
public static void sendMsg(TextChannel channel, Message msg, Consumer<Message> success) {
sendMsg(channel, msg, success, CUSTOM_QUEUE_ERROR);
}
/**
* This is a shortcut for sending messages to a channel
*
* @param channel the {@link TextChannel TextChannel} that we want to send our message to
* @param msg the message to send
* @param success The success consumer
* @param failure the failure consumer
*/
public static void sendMsg(TextChannel channel, Message msg, Consumer<Message> success, Consumer<Throwable> failure) {
//Check if the channel exists
if (channel != null && channel.getGuild().getTextChannelById(channel.getId()) != null) {
//Only send a message if we can talk
if(channel.getGuild().getSelfMember().hasPermission(channel,Permission.MESSAGE_WRITE, Permission.MESSAGE_READ))
channel.sendMessage(msg).queue(success, failure);
}
}
}
| Use build all for large embeds
| src/main/java/ml/duncte123/skybot/utils/MessageUtils.java | Use build all for large embeds | <ide><path>rc/main/java/ml/duncte123/skybot/utils/MessageUtils.java
<ide> public static void sendEmbed(TextChannel channel, MessageEmbed embed) {
<ide> if (channel != null) {
<ide> if (!channel.getGuild().getSelfMember().hasPermission(channel, Permission.MESSAGE_EMBED_LINKS)) {
<del> sendMsg(channel, EmbedUtils.embedToMessage(embed));
<add> (new MessageBuilder()).append(EmbedUtils.embedToMessage(embed))
<add> .buildAll(MessageBuilder.SplitPolicy.NEWLINE)
<add> .forEach(it -> sendMsg(channel, it));
<add>// sendMsg(channel, EmbedUtils.embedToMessage(embed));
<ide> return;
<ide> }
<ide> //noinspection deprecation |
|
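The diff above swaps the single `sendMsg(channel, EmbedUtils.embedToMessage(embed))` call for `MessageBuilder#buildAll`, so an embed that has to be rendered as plain text (when the bot lacks the embed-links permission) is split at line breaks instead of tripping Discord's message length limit. A minimal sketch of that splitting pattern against the JDA 3 API follows; the class, channel and text are placeholders, not code from the repository:

```java
import net.dv8tion.jda.core.MessageBuilder;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.TextChannel;

import java.util.Queue;

class SplitMessageSketch {

    /**
     * Sends text that may exceed Discord's 2000 character limit by letting
     * MessageBuilder split it into several messages at line breaks.
     */
    static void sendLongText(TextChannel channel, String longText) {
        // buildAll splits the builder content into multiple Message objects,
        // preferring line breaks as split points
        Queue<Message> parts = new MessageBuilder()
                .append(longText)
                .buildAll(MessageBuilder.SplitPolicy.NEWLINE);

        // the chunks come back in order; queue each one separately
        parts.forEach(part -> channel.sendMessage(part).queue());
    }
}
```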
Java | mit | d06cb2416bbe173568bf384bdae8cda581bf80c2 | 0 | ButterFaces/ButterFaces,ButterFaces/ButterFaces,ButterFaces/ButterFaces,ButterFaces/ButterFaces,ButterFaces/ButterFaces | /*
* Copyright Lars Michaelis and Stephan Zerhusen 2016.
* Distributed under the MIT License.
* (See accompanying file README.md file or copy at http://opensource.org/licenses/MIT)
*/
package de.larmic.butterfaces.component.html.repeat;
import de.larmic.butterfaces.component.html.repeat.event.RowKeyEventBroadcaster;
import de.larmic.butterfaces.component.html.repeat.event.RowKeyFacesEvent;
import de.larmic.butterfaces.component.html.repeat.model.DataModelWrapper;
import de.larmic.butterfaces.component.html.repeat.model.DataModelWrapperFactory;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenComponentVisitor;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenTreeDataVisitor;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenTreeDataVisitorCallback;
import de.larmic.butterfaces.component.html.repeat.visitor.DataVisitor;
import de.larmic.butterfaces.util.StringJoiner;
import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.application.FacesMessage;
import javax.faces.component.*;
import javax.faces.component.visit.VisitCallback;
import javax.faces.component.visit.VisitContext;
import javax.faces.component.visit.VisitHint;
import javax.faces.component.visit.VisitResult;
import javax.faces.context.FacesContext;
import javax.faces.event.*;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author Lars Michaelis
*/
public abstract class UIDataAdaptor extends UIComponentBase implements NamingContainer, UniqueIdVendor, ComponentSystemEventListener, SystemEventListener, ChildrenTreeDataVisitorCallback {
private String PRE_RENDER_VIEW_EVENT_REGISTERED = UIDataAdaptor.class.getName() + ":preRenderViewEventRegistered";
private static final Logger LOG = Logger.getLogger(UIDataAdaptor.class.getName());
protected final char separatorChar;
private DataModelWrapper<?> dataModelWrapper = null;
private Integer rowKey = null;
private String containerClientId;
Stack<Object> originalVarValues = new Stack<>();
private enum PropertyKeys {
lastId, var, stateVar, childState, first, rows, value, status
}
public UIDataAdaptor() {
this.subscribeToEvent(PostAddToViewEvent.class, this);
this.subscribeToEvent(PostRestoreStateEvent.class, this);
separatorChar = UINamingContainer.getSeparatorChar(FacesContext.getCurrentInstance());
}
protected Map<String, Object> getVariablesMap(FacesContext facesContext) {
return facesContext.getExternalContext().getRequestMap();
}
public String createUniqueId(FacesContext context, String seed) {
Integer i = (Integer) getStateHelper().get(PropertyKeys.lastId);
int lastId = (i != null) ? i : 0;
getStateHelper().put(PropertyKeys.lastId, ++lastId);
return UIViewRoot.UNIQUE_ID_PREFIX + ((seed == null) ? lastId : seed);
}
public Integer getRowKey() {
return rowKey;
}
@Override
public void setRowKey(FacesContext facesContext, Integer rowKey) {
this.saveChildState(facesContext);
this.rowKey = rowKey;
final int rowKeyAsInt = rowKey != null ? rowKey : -1;
getDataModelWrapper().setRowIndex(rowKeyAsInt);
this.containerClientId = null;
boolean rowSelected = (rowKey != null) && isRowAvailable();
setupVariable(facesContext, rowSelected);
this.restoreChildState(facesContext);
}
protected void saveChildState(FacesContext facesContext) {
final Iterator<UIComponent> itr = dataChildren();
while (itr.hasNext()) {
this.saveChildState(facesContext, itr.next());
}
}
protected void saveChildState(FacesContext facesContext, UIComponent component) {
if (component.isTransient()) {
return;
}
ChildStateHolder childStateHolder = null;
if (component instanceof EditableValueHolder) {
EditableValueHolder evh = (EditableValueHolder) component;
childStateHolder = new ChildStateHolder(evh);
} else if (component instanceof UIForm) {
UIForm form = (UIForm) component;
childStateHolder = new ChildStateHolder(form);
}
if (childStateHolder != null) {
getStateHelper().put(PropertyKeys.childState, component.getClientId(facesContext), childStateHolder);
}
if (component.getChildCount() > 0) {
for (UIComponent child : component.getChildren()) {
saveChildState(facesContext, child);
}
}
if (component.getFacetCount() > 0) {
for (UIComponent facet : component.getFacets().values()) {
saveChildState(facesContext, facet);
}
}
}
public Iterator<UIComponent> dataChildren() {
if (getChildCount() > 0) {
return getChildren().iterator();
} else {
return Collections.<UIComponent>emptyList().iterator();
}
}
protected void restoreChildState(FacesContext facesContext) {
Iterator<UIComponent> itr = dataChildren();
while (itr.hasNext()) {
this.restoreChildState(facesContext, itr.next());
}
}
protected void restoreChildState(FacesContext facesContext, UIComponent component) {
String id = component.getId();
component.setId(id); // Forces client id to be reset
ChildStateHolder childStateHolder = null;
Map<String, ChildStateHolder> savedStatesMap = (Map<String, ChildStateHolder>) getStateHelper().get(PropertyKeys.childState);
if (savedStatesMap != null) {
childStateHolder = savedStatesMap.get(component.getClientId(facesContext));
}
if (childStateHolder == null) {
childStateHolder = ChildStateHolder.EMPTY;
}
if (component instanceof EditableValueHolder) {
EditableValueHolder evh = (EditableValueHolder) component;
childStateHolder.apply(evh);
} else if (component instanceof UIForm) {
UIForm form = (UIForm) component;
childStateHolder.apply(form);
}
if (component.getChildCount() > 0) {
for (UIComponent child : component.getChildren()) {
restoreChildState(facesContext, child);
}
}
if (component.getFacetCount() > 0) {
for (UIComponent facet : component.getFacets().values()) {
restoreChildState(facesContext, facet);
}
}
}
protected FacesEvent wrapEvent(FacesEvent event) {
return new RowKeyFacesEvent(this, event, getRowKey());
}
@Override
public void queueEvent(FacesEvent event) {
super.queueEvent(wrapEvent(event));
}
@Override
public void broadcast(FacesEvent event) throws AbortProcessingException {
if (event instanceof RowKeyFacesEvent) {
RowKeyEventBroadcaster.broadcast(getFacesContext(), (RowKeyFacesEvent) event);
} else {
super.broadcast(event);
}
}
protected DataModelWrapper<?> getDataModelWrapper() {
if (dataModelWrapper == null) {
dataModelWrapper = DataModelWrapperFactory.createDataModelWrapper(getValue());
}
return dataModelWrapper;
}
public int getRowIndex() {
return getDataModelWrapper().getRowIndex();
}
public String getVar() {
return (String) getStateHelper().get(PropertyKeys.var);
}
public void setVar(String var) {
getStateHelper().put(PropertyKeys.var, var);
}
public Object getValue() {
return getStateHelper().eval(PropertyKeys.value);
}
public void setValue(Object value) {
resetDataModel();
getStateHelper().put(PropertyKeys.value, value);
}
public String getStatus() {
return (String) getStateHelper().get(PropertyKeys.status);
}
public void setStatus(String status) {
getStateHelper().put(PropertyKeys.status, status);
}
public int getRowCount() {
return getDataModelWrapper().getRowCount();
}
public Object getRowData() {
return getDataModelWrapper().getRowData();
}
public boolean isRowAvailable() {
return getDataModelWrapper().isRowAvailable();
}
protected void setupVariable(FacesContext faces, boolean rowSelected) {
Map<String, Object> attrs = getVariablesMap(faces);
if (rowSelected) {
setupVariable(getVar(), attrs, getRowData());
} else {
removeVariable(getVar(), attrs);
}
String iterationStatusVar = getStatus();
if (iterationStatusVar != null) {
Map<String, Object> requestMap = getVariablesMap(faces);
if (rowSelected) {
RowStatus iterationStatus = new RowStatus(getRowIndex(), getRowCount());
requestMap.put(iterationStatusVar, iterationStatus);
} else {
requestMap.remove(iterationStatusVar);
}
}
}
private void setupVariable(String var, Map<String, Object> attrs, Object rowData) {
if (var != null) {
attrs.put(var, rowData);
}
}
private void removeVariable(String var, Map<String, Object> attrs) {
if (var != null) {
attrs.remove(var);
}
}
@Override
public String getContainerClientId(FacesContext facesContext) {
if (facesContext == null) {
throw new NullPointerException("context");
}
if (null == containerClientId) {
containerClientId = super.getContainerClientId(facesContext);
final Object rowKey = getRowKey();
if (rowKey != null) {
String rowKeyString = rowKey.toString();
containerClientId = StringJoiner.on(separatorChar).join(Arrays.asList(containerClientId, rowKeyString)).toString();
}
}
return containerClientId;
}
public void restoreOrigValue(FacesContext faces) {
String var = getVar();
if (var != null) {
final Map<String, Object> attrs = getVariablesMap(faces);
if (!this.originalVarValues.isEmpty()) {
attrs.put(var, this.originalVarValues.pop());
} else {
attrs.remove(var);
}
}
String iterationStatusVar = getStatus();
if (iterationStatusVar != null) {
Map<String, Object> variablesMap = getVariablesMap(faces);
variablesMap.remove(iterationStatusVar);
}
}
@Override
public void setValueExpression(String name, ValueExpression binding) {
if ("value".equals(name)) {
resetDataModel();
}
if ("var".equals(name)
|| "rowKeyVar".equals(name)
|| "stateVar".equals(name)) {
throw new IllegalArgumentException(MessageFormat.format("{0} cannot be EL-expression", name));
}
super.setValueExpression(name, binding);
}
protected boolean keepSaved(FacesContext context) {
final FacesMessage.Severity maximumSeverity = context.getMaximumSeverity();
return (maximumSeverity != null) && (FacesMessage.SEVERITY_ERROR.compareTo(maximumSeverity) <= 0);
}
public void walk(FacesContext faces, DataVisitor visitor) throws IOException {
getDataModelWrapper().walk(faces, visitor);
restoreOrigValue(faces);
}
@Override
public void setId(String id) {
super.setId(id);
this.containerClientId = null;
}
public void resetDataModel() {
this.dataModelWrapper = null;
}
protected void resetChildState() {
getStateHelper().remove(PropertyKeys.childState);
}
private void resetState() {
this.dataModelWrapper = null;
if (!keepSaved(getFacesContext())) {
resetChildState();
}
}
@Override
public Object saveState(FacesContext context) {
final Object parentState = super.saveState(context);
if (initialStateMarked() && parentState == null) {
return null;
}
return new Object[]{parentState};
}
@Override
public void restoreState(FacesContext context, Object stateObject) {
if (stateObject == null) {
return;
}
super.restoreState(context, ((Object[]) stateObject)[0]);
}
protected boolean matchesBaseId(String clientId, String baseId, char separatorChar) {
return clientId.equals(baseId)
|| clientId.startsWith(baseId)
&& (clientId.length() > baseId.length())
&& (clientId.charAt(baseId.length()) == separatorChar);
}
@Override
public boolean invokeOnComponent(FacesContext context, String clientId, ContextCallback callback) throws FacesException {
if ((null == context) || (null == clientId) || (null == callback)) {
throw new NullPointerException();
}
final String baseId = getClientId(context);
if (!matchesBaseId(clientId, baseId, separatorChar)) {
return false;
}
boolean found = false;
Integer oldRowKey = getRowKey();
try {
setRowKey(context, null);
if (clientId.equals(baseId)) {
callback.invokeContextCallback(context, this);
found = true;
}
if (!found) {
setRowKey(context, null);
if (isRowAvailable()) {
Iterator<UIComponent> dataChildrenItr = dataChildren();
while (dataChildrenItr.hasNext() && !found) {
UIComponent dataChild = dataChildrenItr.next();
found = dataChild.invokeOnComponent(context, clientId, callback);
}
}
}
} catch (Exception e) {
throw new FacesException(e);
} finally {
try {
setRowKey(context, oldRowKey);
restoreOrigValue(context);
} catch (Exception e) {
LOG.log(Level.SEVERE, e.getMessage(), e);
}
}
return found;
}
private boolean visitComponents(Iterator<UIComponent> components, VisitContext context, VisitCallback callback) {
while (components.hasNext()) {
UIComponent nextChild = components.next();
if (nextChild.visitTree(context, callback)) {
return true;
}
}
return false;
}
protected boolean visitDataChildren(VisitContext visitContext, VisitCallback callback, boolean visitRows) throws IOException {
if (visitRows) {
final FacesContext facesContext = visitContext.getFacesContext();
final ChildrenTreeDataVisitor dataVisitor = new ChildrenTreeDataVisitor(callback, visitContext, this);
this.walk(facesContext, dataVisitor);
return dataVisitor.getVisitResult();
} else {
return visitComponents(getFacetsAndChildren(), visitContext, callback);
}
}
/**
* Copied from Richfaces UIDataAdapter#visitTree.
*/
@Override
public boolean visitTree(VisitContext visitContext, VisitCallback callback) {
// First check to see whether we are visitable. If not
// short-circuit out of this subtree, though allow the
// visit to proceed through to other subtrees.
if (!isVisitable(visitContext)) {
return false;
}
// Clear out the row index if one is set so that
// we start from a clean slate.
FacesContext facesContext = visitContext.getFacesContext();
// NOTE: that the visitRows local will be obsolete once the
// appropriate visit hints have been added to the API
boolean visitRows = requiresRowIteration(visitContext);
Integer oldRowKey = null;
if (visitRows) {
oldRowKey = getRowKey();
setRowKey(facesContext, null);
}
// Push ourselves to EL
pushComponentToEL(facesContext, null);
try {
// Visit ourselves. Note that we delegate to the
// VisitContext to actually perform the visit.
VisitResult result = visitContext.invokeVisitCallback(this, callback);
// If the visit is complete, short-circuit out and end the visit
if (result == VisitResult.COMPLETE) {
return true;
}
// Visit children, short-circuiting as necessary
if ((result == VisitResult.ACCEPT)) {
if (visitDataChildren(visitContext, callback, visitRows)) {
return true;
}
}
} catch (IOException e) {
// TODO handle exception
LOG.log(Level.SEVERE, e.getMessage(), e);
} finally {
// Clean up - pop EL and restore old row index
popComponentFromEL(facesContext);
if (visitRows) {
try {
setRowKey(facesContext, oldRowKey);
restoreOrigValue(facesContext);
} catch (Exception e) {
// TODO: handle exception
LOG.log(Level.SEVERE, e.getMessage(), e);
}
}
}
// Return false to allow the visit to continue
return false;
}
private boolean requiresRowIteration(VisitContext context) {
return !context.getHints().contains(VisitHint.SKIP_ITERATION);
}
@Override
public void processEvent(ComponentSystemEvent event) throws AbortProcessingException {
this.processEvent((SystemEvent) event);
}
@Override
public void processEvent(SystemEvent event) throws AbortProcessingException {
if (event instanceof PostAddToViewEvent) {
subscribeToPreRenderViewEventOncePerRequest();
} else if (event instanceof PostRestoreStateEvent) {
subscribeToPreRenderViewEventOncePerRequest();
resetState();
} else if (event instanceof PreRenderViewEvent) {
resetState();
}
}
@Override
public void processDecodes(FacesContext faces) {
if (!this.isRendered()) {
return;
}
pushComponentToEL(faces, this);
this.walkThroughChildren(faces, new ChildrenComponentVisitor(this) {
@Override
public void processComponent(FacesContext context, UIComponent component) {
component.processDecodes(context);
}
});
this.decode(faces);
popComponentFromEL(faces);
}
private void walkThroughChildren(FacesContext faces, ChildrenComponentVisitor visitor) {
if (!this.isRendered()) {
return;
}
final String var = getVar();
if (var != null) {
Map<String, Object> attrs = getVariablesMap(faces);
this.originalVarValues.push(attrs.get(var));
}
this.setRowKey(faces, null);
try {
walk(faces, visitor);
} catch (Exception e) {
throw new FacesException(e);
} finally {
this.setRowKey(faces, null);
this.restoreOrigValue(faces);
}
}
private void subscribeToPreRenderViewEventOncePerRequest() {
final FacesContext facesContext = getFacesContext();
final Map<Object, Object> contextMap = facesContext.getAttributes();
if (contextMap.get(this.getClientId() + PRE_RENDER_VIEW_EVENT_REGISTERED) == null) {
contextMap.put(this.getClientId() + PRE_RENDER_VIEW_EVENT_REGISTERED, Boolean.TRUE);
UIViewRoot viewRoot = facesContext.getViewRoot();
viewRoot.subscribeToViewEvent(PreRenderViewEvent.class, this);
}
}
@Override
public boolean isListenerForSource(Object source) {
return this.equals(source) || source instanceof UIViewRoot;
}
}
| components/src/main/java/de/larmic/butterfaces/component/html/repeat/UIDataAdaptor.java | /*
* Copyright Lars Michaelis and Stephan Zerhusen 2016.
* Distributed under the MIT License.
* (See accompanying file README.md file or copy at http://opensource.org/licenses/MIT)
*/
package de.larmic.butterfaces.component.html.repeat;
import de.larmic.butterfaces.component.html.repeat.event.RowKeyEventBroadcaster;
import de.larmic.butterfaces.component.html.repeat.event.RowKeyFacesEvent;
import de.larmic.butterfaces.component.html.repeat.model.DataModelWrapper;
import de.larmic.butterfaces.component.html.repeat.model.DataModelWrapperFactory;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenComponentVisitor;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenTreeDataVisitor;
import de.larmic.butterfaces.component.html.repeat.visitor.ChildrenTreeDataVisitorCallback;
import de.larmic.butterfaces.component.html.repeat.visitor.DataVisitor;
import de.larmic.butterfaces.util.StringJoiner;
import javax.el.ValueExpression;
import javax.faces.FacesException;
import javax.faces.application.FacesMessage;
import javax.faces.component.*;
import javax.faces.component.visit.VisitCallback;
import javax.faces.component.visit.VisitContext;
import javax.faces.component.visit.VisitHint;
import javax.faces.component.visit.VisitResult;
import javax.faces.context.FacesContext;
import javax.faces.event.*;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author Lars Michaelis
*/
public abstract class UIDataAdaptor extends UIComponentBase implements NamingContainer, UniqueIdVendor, ComponentSystemEventListener, SystemEventListener, ChildrenTreeDataVisitorCallback {
private String PRE_RENDER_VIEW_EVENT_REGISTERED = UIDataAdaptor.class.getName() + ":preRenderViewEventRegistered";
private static final Logger LOG = Logger.getLogger(UIDataAdaptor.class.getName());
protected final char separatorChar;
private DataModelWrapper<?> dataModelWrapper = null;
private Integer rowKey = null;
private String containerClientId;
Stack<Object> originalVarValues = new Stack<>();
private enum PropertyKeys {
lastId, var, stateVar, childState, first, rows, value, status
}
public UIDataAdaptor() {
this.subscribeToEvent(PostAddToViewEvent.class, this);
this.subscribeToEvent(PostRestoreStateEvent.class, this);
separatorChar = UINamingContainer.getSeparatorChar(FacesContext.getCurrentInstance());
}
protected Map<String, Object> getVariablesMap(FacesContext facesContext) {
return facesContext.getExternalContext().getRequestMap();
}
public String createUniqueId(FacesContext context, String seed) {
Integer i = (Integer) getStateHelper().get(PropertyKeys.lastId);
int lastId = (i != null) ? i : 0;
getStateHelper().put(PropertyKeys.lastId, ++lastId);
return UIViewRoot.UNIQUE_ID_PREFIX + ((seed == null) ? lastId : seed);
}
public Integer getRowKey() {
return rowKey;
}
@Override
public void setRowKey(FacesContext facesContext, Integer rowKey) {
this.saveChildState(facesContext);
if (rowKey != null) {
System.out.println("Setting rowkey to " + rowKey);
} else {
System.out.println("Setting rowkey to null");
}
this.rowKey = rowKey;
final int rowKeyAsInt = rowKey != null ? rowKey : -1;
getDataModelWrapper().setRowIndex(rowKeyAsInt);
this.containerClientId = null;
boolean rowSelected = (rowKey != null) && isRowAvailable();
setupVariable(facesContext, rowSelected);
this.restoreChildState(facesContext);
}
protected void saveChildState(FacesContext facesContext) {
final Iterator<UIComponent> itr = dataChildren();
while (itr.hasNext()) {
this.saveChildState(facesContext, itr.next());
}
}
protected void saveChildState(FacesContext facesContext, UIComponent component) {
if (component.isTransient()) {
return;
}
ChildStateHolder childStateHolder = null;
if (component instanceof EditableValueHolder) {
EditableValueHolder evh = (EditableValueHolder) component;
childStateHolder = new ChildStateHolder(evh);
} else if (component instanceof UIForm) {
UIForm form = (UIForm) component;
childStateHolder = new ChildStateHolder(form);
}
if (childStateHolder != null) {
getStateHelper().put(PropertyKeys.childState, component.getClientId(facesContext), childStateHolder);
}
if (component.getChildCount() > 0) {
for (UIComponent child : component.getChildren()) {
saveChildState(facesContext, child);
}
}
if (component.getFacetCount() > 0) {
for (UIComponent facet : component.getFacets().values()) {
saveChildState(facesContext, facet);
}
}
}
public Iterator<UIComponent> dataChildren() {
if (getChildCount() > 0) {
return getChildren().iterator();
} else {
return Collections.<UIComponent>emptyList().iterator();
}
}
protected void restoreChildState(FacesContext facesContext) {
Iterator<UIComponent> itr = dataChildren();
while (itr.hasNext()) {
this.restoreChildState(facesContext, itr.next());
}
}
protected void restoreChildState(FacesContext facesContext, UIComponent component) {
String id = component.getId();
component.setId(id); // Forces client id to be reset
ChildStateHolder childStateHolder = null;
Map<String, ChildStateHolder> savedStatesMap = (Map<String, ChildStateHolder>) getStateHelper().get(PropertyKeys.childState);
if (savedStatesMap != null) {
childStateHolder = savedStatesMap.get(component.getClientId(facesContext));
}
if (childStateHolder == null) {
childStateHolder = ChildStateHolder.EMPTY;
}
if (component instanceof EditableValueHolder) {
EditableValueHolder evh = (EditableValueHolder) component;
childStateHolder.apply(evh);
} else if (component instanceof UIForm) {
UIForm form = (UIForm) component;
childStateHolder.apply(form);
}
if (component.getChildCount() > 0) {
for (UIComponent child : component.getChildren()) {
restoreChildState(facesContext, child);
}
}
if (component.getFacetCount() > 0) {
for (UIComponent facet : component.getFacets().values()) {
restoreChildState(facesContext, facet);
}
}
}
protected FacesEvent wrapEvent(FacesEvent event) {
return new RowKeyFacesEvent(this, event, getRowKey());
}
@Override
public void queueEvent(FacesEvent event) {
super.queueEvent(wrapEvent(event));
}
@Override
public void broadcast(FacesEvent event) throws AbortProcessingException {
if (event instanceof RowKeyFacesEvent) {
RowKeyEventBroadcaster.broadcast(getFacesContext(), (RowKeyFacesEvent) event);
} else {
super.broadcast(event);
}
}
protected DataModelWrapper<?> getDataModelWrapper() {
if (dataModelWrapper == null) {
dataModelWrapper = DataModelWrapperFactory.createDataModelWrapper(getValue());
}
return dataModelWrapper;
}
public int getRowIndex() {
return getDataModelWrapper().getRowIndex();
}
public String getVar() {
return (String) getStateHelper().get(PropertyKeys.var);
}
public void setVar(String var) {
getStateHelper().put(PropertyKeys.var, var);
}
public Object getValue() {
return getStateHelper().eval(PropertyKeys.value);
}
public void setValue(Object value) {
resetDataModel();
getStateHelper().put(PropertyKeys.value, value);
}
public String getStatus() {
return (String) getStateHelper().get(PropertyKeys.status);
}
public void setStatus(String status) {
getStateHelper().put(PropertyKeys.status, status);
}
public int getRowCount() {
return getDataModelWrapper().getRowCount();
}
public Object getRowData() {
return getDataModelWrapper().getRowData();
}
public boolean isRowAvailable() {
return getDataModelWrapper().isRowAvailable();
}
protected void setupVariable(FacesContext faces, boolean rowSelected) {
Map<String, Object> attrs = getVariablesMap(faces);
if (rowSelected) {
setupVariable(getVar(), attrs, getRowData());
} else {
removeVariable(getVar(), attrs);
}
String iterationStatusVar = getStatus();
if (iterationStatusVar != null) {
Map<String, Object> requestMap = getVariablesMap(faces);
if (rowSelected) {
RowStatus iterationStatus = new RowStatus(getRowIndex(), getRowCount());
requestMap.put(iterationStatusVar, iterationStatus);
} else {
requestMap.remove(iterationStatusVar);
}
}
}
private void setupVariable(String var, Map<String, Object> attrs, Object rowData) {
if (var != null) {
attrs.put(var, rowData);
}
}
private void removeVariable(String var, Map<String, Object> attrs) {
if (var != null) {
attrs.remove(var);
}
}
@Override
public String getContainerClientId(FacesContext facesContext) {
if (facesContext == null) {
throw new NullPointerException("context");
}
if (null == containerClientId) {
containerClientId = super.getContainerClientId(facesContext);
final Object rowKey = getRowKey();
if (rowKey != null) {
String rowKeyString = rowKey.toString();
containerClientId = StringJoiner.on(separatorChar).join(Arrays.asList(containerClientId, rowKeyString)).toString();
}
}
return containerClientId;
}
public void restoreOrigValue(FacesContext faces) {
String var = getVar();
if (var != null) {
final Map<String, Object> attrs = getVariablesMap(faces);
if (!this.originalVarValues.isEmpty()) {
attrs.put(var, this.originalVarValues.pop());
} else {
attrs.remove(var);
}
}
String iterationStatusVar = getStatus();
if (iterationStatusVar != null) {
Map<String, Object> variablesMap = getVariablesMap(faces);
variablesMap.remove(iterationStatusVar);
}
}
@Override
public void setValueExpression(String name, ValueExpression binding) {
if ("value".equals(name)) {
resetDataModel();
}
if ("var".equals(name)
|| "rowKeyVar".equals(name)
|| "stateVar".equals(name)) {
throw new IllegalArgumentException(MessageFormat.format("{0} cannot be EL-expression", name));
}
super.setValueExpression(name, binding);
}
protected boolean keepSaved(FacesContext context) {
final FacesMessage.Severity maximumSeverity = context.getMaximumSeverity();
return (maximumSeverity != null) && (FacesMessage.SEVERITY_ERROR.compareTo(maximumSeverity) <= 0);
}
public void walk(FacesContext faces, DataVisitor visitor) throws IOException {
getDataModelWrapper().walk(faces, visitor);
restoreOrigValue(faces);
}
@Override
public void setId(String id) {
super.setId(id);
this.containerClientId = null;
}
public void resetDataModel() {
this.dataModelWrapper = null;
}
protected void resetChildState() {
getStateHelper().remove(PropertyKeys.childState);
}
private void resetState() {
this.dataModelWrapper = null;
if (!keepSaved(getFacesContext())) {
resetChildState();
}
}
@Override
public Object saveState(FacesContext context) {
final Object parentState = super.saveState(context);
if (initialStateMarked() && parentState == null) {
return null;
}
return new Object[]{parentState};
}
@Override
public void restoreState(FacesContext context, Object stateObject) {
if (stateObject == null) {
return;
}
super.restoreState(context, ((Object[]) stateObject)[0]);
}
protected boolean matchesBaseId(String clientId, String baseId, char separatorChar) {
return clientId.equals(baseId)
|| clientId.startsWith(baseId)
&& (clientId.length() > baseId.length())
&& (clientId.charAt(baseId.length()) == separatorChar);
}
@Override
public boolean invokeOnComponent(FacesContext context, String clientId, ContextCallback callback) throws FacesException {
if ((null == context) || (null == clientId) || (null == callback)) {
throw new NullPointerException();
}
final String baseId = getClientId(context);
if (!matchesBaseId(clientId, baseId, separatorChar)) {
return false;
}
boolean found = false;
Integer oldRowKey = getRowKey();
try {
setRowKey(context, null);
if (clientId.equals(baseId)) {
callback.invokeContextCallback(context, this);
found = true;
}
if (!found) {
setRowKey(context, null);
if (isRowAvailable()) {
Iterator<UIComponent> dataChildrenItr = dataChildren();
while (dataChildrenItr.hasNext() && !found) {
UIComponent dataChild = dataChildrenItr.next();
found = dataChild.invokeOnComponent(context, clientId, callback);
}
}
}
} catch (Exception e) {
throw new FacesException(e);
} finally {
try {
setRowKey(context, oldRowKey);
restoreOrigValue(context);
} catch (Exception e) {
LOG.log(Level.SEVERE, e.getMessage(), e);
}
}
return found;
}
private boolean visitComponents(Iterator<UIComponent> components, VisitContext context, VisitCallback callback) {
while (components.hasNext()) {
UIComponent nextChild = components.next();
if (nextChild.visitTree(context, callback)) {
return true;
}
}
return false;
}
protected boolean visitDataChildren(VisitContext visitContext, VisitCallback callback, boolean visitRows) throws IOException {
if (visitRows) {
final FacesContext facesContext = visitContext.getFacesContext();
final ChildrenTreeDataVisitor dataVisitor = new ChildrenTreeDataVisitor(callback, visitContext, this);
this.walk(facesContext, dataVisitor);
return dataVisitor.getVisitResult();
} else {
return visitComponents(getFacetsAndChildren(), visitContext, callback);
}
}
/**
* Copied from Richfaces UIDataAdapter#visitTree.
*/
@Override
public boolean visitTree(VisitContext visitContext, VisitCallback callback) {
// First check to see whether we are visitable. If not
// short-circuit out of this subtree, though allow the
// visit to proceed through to other subtrees.
if (!isVisitable(visitContext)) {
return false;
}
// Clear out the row index is one is set so that
// we start from a clean slate.
FacesContext facesContext = visitContext.getFacesContext();
// NOTE: that the visitRows local will be obsolete once the
// appropriate visit hints have been added to the API
boolean visitRows = requiresRowIteration(visitContext);
Integer oldRowKey = null;
if (visitRows) {
oldRowKey = getRowKey();
setRowKey(facesContext, null);
}
// Push ourselves to EL
pushComponentToEL(facesContext, null);
try {
// Visit ourselves. Note that we delegate to the
// VisitContext to actually perform the visit.
VisitResult result = visitContext.invokeVisitCallback(this, callback);
// If the visit is complete, short-circuit out and end the visit
if (result == VisitResult.COMPLETE) {
return true;
}
// Visit children, short-circuiting as necessary
if ((result == VisitResult.ACCEPT)) {
if (visitDataChildren(visitContext, callback, visitRows)) {
return true;
}
}
} catch (IOException e) {
// TODO handle exception
LOG.log(Level.SEVERE, e.getMessage(), e);
} finally {
// Clean up - pop EL and restore old row index
popComponentFromEL(facesContext);
if (visitRows) {
try {
setRowKey(facesContext, oldRowKey);
restoreOrigValue(facesContext);
} catch (Exception e) {
// TODO: handle exception
LOG.log(Level.SEVERE, e.getMessage(), e);
}
}
}
// Return false to allow the visit to continue
return false;
}
private boolean requiresRowIteration(VisitContext context) {
return !context.getHints().contains(VisitHint.SKIP_ITERATION);
}
@Override
public void processEvent(ComponentSystemEvent event) throws AbortProcessingException {
this.processEvent((SystemEvent) event);
}
@Override
public void processEvent(SystemEvent event) throws AbortProcessingException {
if (event instanceof PostAddToViewEvent) {
subscribeToPreRenderViewEventOncePerRequest();
} else if (event instanceof PostRestoreStateEvent) {
subscribeToPreRenderViewEventOncePerRequest();
resetState();
} else if (event instanceof PreRenderViewEvent) {
resetState();
}
}
@Override
public void processDecodes(FacesContext faces) {
if (!this.isRendered()) {
return;
}
pushComponentToEL(faces, this);
this.walkThroughChildren(faces, new ChildrenComponentVisitor(this) {
@Override
public void processComponent(FacesContext context, UIComponent component) {
component.processDecodes(context);
}
});
this.decode(faces);
popComponentFromEL(faces);
}
private void walkThroughChildren(FacesContext faces, ChildrenComponentVisitor visitor) {
if (!this.isRendered()) {
return;
}
final String var = getVar();
if (var != null) {
Map<String, Object> attrs = getVariablesMap(faces);
this.originalVarValues.push(attrs.get(var));
}
this.setRowKey(faces, null);
try {
walk(faces, visitor);
} catch (Exception e) {
throw new FacesException(e);
} finally {
this.setRowKey(faces, null);
this.restoreOrigValue(faces);
}
}
private void subscribeToPreRenderViewEventOncePerRequest() {
final FacesContext facesContext = getFacesContext();
final Map<Object, Object> contextMap = facesContext.getAttributes();
if (contextMap.get(this.getClientId() + PRE_RENDER_VIEW_EVENT_REGISTERED) == null) {
contextMap.put(this.getClientId() + PRE_RENDER_VIEW_EVENT_REGISTERED, Boolean.TRUE);
UIViewRoot viewRoot = facesContext.getViewRoot();
viewRoot.subscribeToViewEvent(PreRenderViewEvent.class, this);
}
}
@Override
public boolean isListenerForSource(Object source) {
return this.equals(source) || source instanceof UIViewRoot;
}
}
| #40 Table: remove log output
| components/src/main/java/de/larmic/butterfaces/component/html/repeat/UIDataAdaptor.java | #40 Table: remove log output | <ide><path>omponents/src/main/java/de/larmic/butterfaces/component/html/repeat/UIDataAdaptor.java
<ide> public void setRowKey(FacesContext facesContext, Integer rowKey) {
<ide> this.saveChildState(facesContext);
<ide>
<del> if (rowKey != null) {
<del> System.out.println("Setting rowkey to " + rowKey);
<del> } else {
<del> System.out.println("Setting rowkey to null");
<del> }
<del>
<ide> this.rowKey = rowKey;
<ide> final int rowKeyAsInt = rowKey != null ? rowKey : -1;
<ide> |
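Note on the change above: the commit strips ad-hoc System.out.println debugging from setRowKey. A minimal sketch of the usual alternative, assuming the class's existing java.util.logging LOG field and Level import (the FINE level and the guard are illustrative, not part of the commit):
// Route the same diagnostic through the class logger at FINE level, guarded
// so the string concatenation is skipped when the level is disabled; logging
// configuration can then silence it without touching the code again.
if (LOG.isLoggable(Level.FINE)) {
    LOG.fine("Setting rowKey to " + rowKey);
}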
|
Java | apache-2.0 | 71be7ca0f52a323a10ef9fbcc5ee952b8db18f85 | 0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.annotate.AnnotationProvider;
import com.intellij.openapi.vcs.annotate.FileAnnotation;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.intellij.util.ObjectUtils.assertNotNull;
public class AnnotateLocalFileAction {
private static final Logger LOG = Logger.getInstance(AnnotateLocalFileAction.class);
private static boolean isEnabled(@NotNull AnActionEvent e) {
Project project = e.getProject();
if (project == null || project.isDisposed()) return false;
VirtualFile file = e.getData(CommonDataKeys.VIRTUAL_FILE);
if (file == null || file.isDirectory() || file.getFileType().isBinary()) return false;
final AbstractVcs vcs = ProjectLevelVcsManager.getInstance(project).getVcsFor(file);
if (vcs == null) return false;
final AnnotationProvider annotationProvider = vcs.getAnnotationProvider();
if (annotationProvider == null) return false;
FileStatus fileStatus = ChangeListManager.getInstance(project).getStatus(file);
if (fileStatus == FileStatus.UNKNOWN || fileStatus == FileStatus.ADDED || fileStatus == FileStatus.IGNORED) {
return false;
}
return true;
}
private static boolean isSuspended(@NotNull AnActionEvent e) {
VirtualFile file = e.getRequiredData(CommonDataKeys.VIRTUAL_FILE);
return VcsAnnotateUtil.getBackgroundableLock(e.getRequiredData(CommonDataKeys.PROJECT), file).isLocked();
}
private static boolean isAnnotated(@NotNull AnActionEvent e) {
List<Editor> editors = getEditors(e.getDataContext());
return ContainerUtil.exists(editors, editor -> editor.getGutter().isAnnotationsShown());
}
private static void perform(AnActionEvent e, boolean selected) {
if (!selected) {
List<Editor> editors = getEditors(e.getDataContext());
for (Editor editor : editors) {
editor.getGutter().closeAllAnnotations();
}
}
else {
Project project = assertNotNull(e.getProject());
Editor editor = e.getData(CommonDataKeys.EDITOR);
if (editor == null) {
VirtualFile selectedFile = e.getRequiredData(CommonDataKeys.VIRTUAL_FILE);
FileEditor[] fileEditors = FileEditorManager.getInstance(project).openFile(selectedFile, false);
for (FileEditor fileEditor : fileEditors) {
if (fileEditor instanceof TextEditor) {
editor = ((TextEditor)fileEditor).getEditor();
}
}
if (editor == null) {
Messages.showErrorDialog(project, "Can't create text editor for " + selectedFile.getPresentableUrl(),
VcsBundle.message("message.title.annotate"));
LOG.warn(String.format("Can't create text editor for file: valid - %s; file type - %s; editors - %s",
selectedFile.isValid(), selectedFile.getFileType().getName(), Arrays.toString(fileEditors)));
return;
}
}
doAnnotate(editor, project);
}
}
private static void doAnnotate(@NotNull final Editor editor, @NotNull final Project project) {
final VirtualFile file = FileDocumentManager.getInstance().getFile(editor.getDocument());
if (file == null) return;
final AbstractVcs vcs = ProjectLevelVcsManager.getInstance(project).getVcsFor(file);
if (vcs == null) return;
final AnnotationProvider annotationProvider = vcs.getAnnotationProvider();
assert annotationProvider != null;
final Ref<FileAnnotation> fileAnnotationRef = new Ref<>();
final Ref<VcsException> exceptionRef = new Ref<>();
VcsAnnotateUtil.getBackgroundableLock(project, file).lock();
final Task.Backgroundable annotateTask = new Task.Backgroundable(project, VcsBundle.message("retrieving.annotations"), true) {
@Override
public void run(final @NotNull ProgressIndicator indicator) {
try {
fileAnnotationRef.set(annotationProvider.annotate(file));
}
catch (VcsException e) {
exceptionRef.set(e);
}
catch (ProcessCanceledException pce) {
throw pce;
}
catch (Throwable t) {
exceptionRef.set(new VcsException(t));
}
}
@Override
public void onCancel() {
onSuccess();
}
@Override
public void onSuccess() {
VcsAnnotateUtil.getBackgroundableLock(project, file).unlock();
if (!exceptionRef.isNull()) {
LOG.warn(exceptionRef.get());
AbstractVcsHelper.getInstance(project).showErrors(Collections.singletonList(exceptionRef.get()), VcsBundle.message("message.title.annotate"));
}
if (!fileAnnotationRef.isNull()) {
AnnotateToggleAction.doAnnotate(editor, project, fileAnnotationRef.get(), vcs);
}
}
};
ProgressManager.getInstance().run(annotateTask);
}
@NotNull
private static List<Editor> getEditors(@NotNull DataContext context) {
Editor editor = context.getData(CommonDataKeys.EDITOR);
if (editor != null) return Collections.singletonList(editor);
Project project = context.getData(CommonDataKeys.PROJECT);
VirtualFile file = context.getData(CommonDataKeys.VIRTUAL_FILE);
if (project == null || file == null) return Collections.emptyList();
return VcsAnnotateUtil.getEditors(project, file);
}
public static class Provider implements AnnotateToggleAction.Provider {
@Override
public boolean isEnabled(AnActionEvent e) {
return AnnotateLocalFileAction.isEnabled(e);
}
@Override
public boolean isSuspended(@NotNull AnActionEvent e) {
return AnnotateLocalFileAction.isSuspended(e);
}
@Override
public boolean isAnnotated(AnActionEvent e) {
return AnnotateLocalFileAction.isAnnotated(e);
}
@Override
public void perform(@NotNull AnActionEvent e, boolean selected) {
AnnotateLocalFileAction.perform(e, selected);
}
}
}
| platform/vcs-impl/src/com/intellij/openapi/vcs/actions/AnnotateLocalFileAction.java | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.annotate.AnnotationProvider;
import com.intellij.openapi.vcs.annotate.FileAnnotation;
import com.intellij.openapi.vcs.changes.ChangeListManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.intellij.util.ObjectUtils.assertNotNull;
public class AnnotateLocalFileAction {
private static final Logger LOG = Logger.getInstance(AnnotateLocalFileAction.class);
private static boolean isEnabled(@NotNull AnActionEvent e) {
Project project = e.getProject();
if (project == null || project.isDisposed()) return false;
VirtualFile file = e.getData(CommonDataKeys.VIRTUAL_FILE);
if (file == null || file.isDirectory() || file.getFileType().isBinary()) return false;
final AbstractVcs vcs = ProjectLevelVcsManager.getInstance(project).getVcsFor(file);
if (vcs == null) return false;
final AnnotationProvider annotationProvider = vcs.getAnnotationProvider();
if (annotationProvider == null) return false;
FileStatus fileStatus = ChangeListManager.getInstance(project).getStatus(file);
if (fileStatus == FileStatus.UNKNOWN || fileStatus == FileStatus.ADDED || fileStatus == FileStatus.IGNORED) {
return false;
}
return true;
}
private static boolean isSuspended(@NotNull AnActionEvent e) {
VirtualFile file = e.getRequiredData(CommonDataKeys.VIRTUAL_FILE);
return VcsAnnotateUtil.getBackgroundableLock(e.getRequiredData(CommonDataKeys.PROJECT), file).isLocked();
}
private static boolean isAnnotated(@NotNull AnActionEvent e) {
List<Editor> editors = getEditors(e.getDataContext());
return ContainerUtil.exists(editors, editor -> editor.getGutter().isAnnotationsShown());
}
private static void perform(AnActionEvent e, boolean selected) {
if (!selected) {
List<Editor> editors = getEditors(e.getDataContext());
for (Editor editor : editors) {
editor.getGutter().closeAllAnnotations();
}
}
else {
Project project = assertNotNull(e.getProject());
Editor editor = e.getData(CommonDataKeys.EDITOR);
if (editor == null) {
VirtualFile selectedFile = e.getRequiredData(CommonDataKeys.VIRTUAL_FILE);
FileEditor[] fileEditors = FileEditorManager.getInstance(project).openFile(selectedFile, false);
for (FileEditor fileEditor : fileEditors) {
if (fileEditor instanceof TextEditor) {
editor = ((TextEditor)fileEditor).getEditor();
}
}
if (editor == null) {
LOG.error(String.format("Can't create text editor for file: valid - %s; file type - %s; editors - %s",
selectedFile.isValid(), selectedFile.getFileType(), Arrays.toString(fileEditors)));
return;
}
}
doAnnotate(editor, project);
}
}
private static void doAnnotate(@NotNull final Editor editor, @NotNull final Project project) {
final VirtualFile file = FileDocumentManager.getInstance().getFile(editor.getDocument());
if (file == null) return;
final AbstractVcs vcs = ProjectLevelVcsManager.getInstance(project).getVcsFor(file);
if (vcs == null) return;
final AnnotationProvider annotationProvider = vcs.getAnnotationProvider();
assert annotationProvider != null;
final Ref<FileAnnotation> fileAnnotationRef = new Ref<>();
final Ref<VcsException> exceptionRef = new Ref<>();
VcsAnnotateUtil.getBackgroundableLock(project, file).lock();
final Task.Backgroundable annotateTask = new Task.Backgroundable(project, VcsBundle.message("retrieving.annotations"), true) {
@Override
public void run(final @NotNull ProgressIndicator indicator) {
try {
fileAnnotationRef.set(annotationProvider.annotate(file));
}
catch (VcsException e) {
exceptionRef.set(e);
}
catch (ProcessCanceledException pce) {
throw pce;
}
catch (Throwable t) {
exceptionRef.set(new VcsException(t));
}
}
@Override
public void onCancel() {
onSuccess();
}
@Override
public void onSuccess() {
VcsAnnotateUtil.getBackgroundableLock(project, file).unlock();
if (!exceptionRef.isNull()) {
LOG.warn(exceptionRef.get());
AbstractVcsHelper.getInstance(project).showErrors(Collections.singletonList(exceptionRef.get()), VcsBundle.message("message.title.annotate"));
}
if (!fileAnnotationRef.isNull()) {
AnnotateToggleAction.doAnnotate(editor, project, fileAnnotationRef.get(), vcs);
}
}
};
ProgressManager.getInstance().run(annotateTask);
}
@NotNull
private static List<Editor> getEditors(@NotNull DataContext context) {
Editor editor = context.getData(CommonDataKeys.EDITOR);
if (editor != null) return Collections.singletonList(editor);
Project project = context.getData(CommonDataKeys.PROJECT);
VirtualFile file = context.getData(CommonDataKeys.VIRTUAL_FILE);
if (project == null || file == null) return Collections.emptyList();
return VcsAnnotateUtil.getEditors(project, file);
}
public static class Provider implements AnnotateToggleAction.Provider {
@Override
public boolean isEnabled(AnActionEvent e) {
return AnnotateLocalFileAction.isEnabled(e);
}
@Override
public boolean isSuspended(@NotNull AnActionEvent e) {
return AnnotateLocalFileAction.isSuspended(e);
}
@Override
public boolean isAnnotated(AnActionEvent e) {
return AnnotateLocalFileAction.isAnnotated(e);
}
@Override
public void perform(@NotNull AnActionEvent e, boolean selected) {
AnnotateLocalFileAction.perform(e, selected);
}
}
}
| EA-135766 - assert: AnnotateLocalFileAction.perform
Do not report an error - IpnbFileType "Jupyter Notebook" is a text file
without an available TextFileEditor.
| platform/vcs-impl/src/com/intellij/openapi/vcs/actions/AnnotateLocalFileAction.java | EA-135766 - assert: AnnotateLocalFileAction.perform | <ide><path>latform/vcs-impl/src/com/intellij/openapi/vcs/actions/AnnotateLocalFileAction.java
<ide> import com.intellij.openapi.progress.ProgressManager;
<ide> import com.intellij.openapi.progress.Task;
<ide> import com.intellij.openapi.project.Project;
<add>import com.intellij.openapi.ui.Messages;
<ide> import com.intellij.openapi.util.Ref;
<ide> import com.intellij.openapi.vcs.*;
<ide> import com.intellij.openapi.vcs.annotate.AnnotationProvider;
<ide> }
<ide>
<ide> if (editor == null) {
<del> LOG.error(String.format("Can't create text editor for file: valid - %s; file type - %s; editors - %s",
<del> selectedFile.isValid(), selectedFile.getFileType(), Arrays.toString(fileEditors)));
<add> Messages.showErrorDialog(project, "Can't create text editor for " + selectedFile.getPresentableUrl(),
<add> VcsBundle.message("message.title.annotate"));
<add> LOG.warn(String.format("Can't create text editor for file: valid - %s; file type - %s; editors - %s",
<add> selectedFile.isValid(), selectedFile.getFileType().getName(), Arrays.toString(fileEditors)));
<ide>
<ide> return;
<ide> } |
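Note on the change above: the commit treats a missing text editor as an expected, user-visible condition rather than a programming error. In the IntelliJ platform LOG.error() can be surfaced like an assertion failure (the EA- prefix in the message refers to such exception reports), while LOG.warn() only records a diagnostic. A condensed sketch of the resulting guard, reusing the platform calls already imported by the class (shape and wording are illustrative):
// Illustrative guard: tell the user, log a warning for diagnostics, and bail
// out, instead of raising an error report for a file type with no TextEditor.
if (editor == null) {
    Messages.showErrorDialog(project,
            "Can't create text editor for " + selectedFile.getPresentableUrl(),
            VcsBundle.message("message.title.annotate"));
    LOG.warn("No TextEditor available for file type " + selectedFile.getFileType().getName());
    return;
}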
|
Java | apache-2.0 | 7aca39f753e86e9885b81822d77ba4312d959c2d | 0 | ywjno/nutz,nutzam/nutz,ywjno/nutz,nutzam/nutz,elkan1788/nutz,elkan1788/nutz,nutzam/nutz,nutzam/nutz,elkan1788/nutz,nutzam/nutz,ywjno/nutz,elkan1788/nutz,ywjno/nutz,ywjno/nutz | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh([email protected])
* @author Wendal([email protected])
* @author bonyfish([email protected])
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| src/org/nutz/lang/Streams.java | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh([email protected])
* @author Wendal([email protected])
* @author bonyfish([email protected])
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(ops);
safeClose(ins);
}
}
}
 | Streams writeAndClose: better to force a flush of the stream
| src/org/nutz/lang/Streams.java | Streams writeAndClose 最好强制刷一下流 | <ide><path>rc/org/nutz/lang/Streams.java
<ide> throw Lang.wrapThrow(e);
<ide> }
<ide> finally {
<add> safeFlush(writer);
<ide> safeClose(writer);
<ide> }
<ide> }
<ide> throw Lang.wrapThrow(e);
<ide> }
<ide> finally {
<add> safeFlush(ops);
<ide> safeClose(ops);
<ide> safeClose(ins);
<ide> }
<ide> throw Lang.wrapThrow(e);
<ide> }
<ide> finally {
<add> safeFlush(writer);
<ide> safeClose(writer);
<ide> safeClose(reader);
<ide> }
<ide> throw Lang.wrapThrow(e);
<ide> }
<ide> finally {
<add> safeFlush(ops);
<ide> safeClose(ops);
<ide> }
<ide> }
<ide> throw Lang.wrapThrow(e);
<ide> }
<ide> finally {
<add> safeFlush(ops);
<ide> safeClose(ops);
<ide> safeClose(ins);
<ide> } |
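Note on the change above: the commit adds a safeFlush(...) call before safeClose(...) in every writeAndClose variant, as the message says. An explicit flush pushes out any bytes still held by a buffering wrapper and is effectively a no-op on an already-flushed stream, so the extra call only adds safety. A small usage sketch against the API defined above (file paths are hypothetical):
// Copy one stream to another and force buffered bytes out before closing;
// this mirrors what Streams.writeAndClose(ops, ins) now does internally.
InputStream ins = Streams.fileIn("data/source.bin");
OutputStream ops = Streams.fileOut("data/target.bin");
try {
    Streams.write(ops, ins);
}
catch (IOException e) {
    throw Lang.wrapThrow(e);
}
finally {
    Streams.safeFlush(ops);
    Streams.safeClose(ops);
    Streams.safeClose(ins);
}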
|
Java | epl-1.0 | 87b68149241ef2e00a8ce2e4a227f2f12fd661ee | 0 | smeup/asup,smeup/asup,smeup/asup | /**
* Copyright (c) 2012, 2016 Sme.UP and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
*
* Contributors:
* Mattia Rocchi - Initial API and implementation
* Giuliano Giancristofaro - Implementation
*/
package org.smeup.sys.dk.compiler.rpj;
import javax.inject.Inject;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.smeup.sys.dk.compiler.DevelopmentKitCompilerRuntimeException;
import org.smeup.sys.dk.compiler.QCompilationUnit;
import org.smeup.sys.dk.compiler.QCompilerLinker;
import org.smeup.sys.dk.compiler.QDevelopmentKitCompilerFactory;
import org.smeup.sys.il.core.meta.QFacet;
import org.smeup.sys.il.data.def.QBufferDef;
import org.smeup.sys.il.data.def.QBufferedDataDef;
import org.smeup.sys.il.data.def.QCharacterDef;
import org.smeup.sys.il.data.def.QCompoundDataDef;
import org.smeup.sys.il.data.def.QDataStructDef;
import org.smeup.sys.il.data.def.QMultipleAtomicDataDef;
import org.smeup.sys.il.data.def.QMultipleCompoundDataDef;
import org.smeup.sys.il.data.def.QNumericDef;
import org.smeup.sys.il.data.def.QStrollerDef;
import org.smeup.sys.il.data.term.QDataTerm;
import org.smeup.sys.il.esam.QDataSetTerm;
import org.smeup.sys.il.esam.QDisplayTerm;
import org.smeup.sys.il.esam.QPrintTerm;
import org.smeup.sys.il.expr.IntegratedLanguageExpressionRuntimeException;
import org.smeup.sys.il.flow.QCallableUnit;
import org.smeup.sys.il.flow.QFileSection;
import org.smeup.sys.il.flow.QPrototype;
import org.smeup.sys.os.file.QExternalFile;
public class RPJDataLikeRefactor extends RPJAbstractDataRefactor {
@Inject
public RPJDataLikeRefactor(QCompilationUnit compilationUnit) {
super(compilationUnit);
}
@Override
public boolean visit(QDataTerm<?> target) {
// TODO
if(target.getDataTermType() == null)
return super.visit(target);
if (target.getLike() == null) {
switch (target.getDataTermType()) {
case MULTIPLE_ATOMIC:
case MULTIPLE_COMPOUND:
case UNARY_COMPOUND:
return super.visit(target);
case UNARY_ATOMIC:
if(target instanceof QPrototype)
break;
// search file definition
if(target.getDefinition() instanceof QBufferedDataDef<?>) {
QBufferedDataDef<?> bufferedDataDef = (QBufferedDataDef<?>) target.getDefinition();
if(bufferedDataDef.getLength() == 0) {
if(bufferedDataDef instanceof QBufferDef)
break;
if(bufferedDataDef instanceof QNumericDef)
break;
// from files
QDataTerm<?> displayTerm = getFromFiles(target);
if(displayTerm == null)
displayTerm = getFromModules(target);
if(displayTerm != null) {
completeDefinition(displayTerm, target);
return false;
}
}
}
}
return super.visit(target);
}
String like = target.getLike();
if (like.toUpperCase().startsWith("*IN") && !like.equalsIgnoreCase("*IN"))
like = "*INKA";
QDataTerm<?> source = getCompilationUnit().getDataTerm(like, true);
if (source == null)
throw new IntegratedLanguageExpressionRuntimeException("Invalid liked data term: " + target.getLike());
if (source.getLike() != null)
visit(source);
completeDefinition(source, target);
target.setLike(null);
return false;
}
// TODO move this method in QCompilationUnit
private QDataTerm<?> getFromModules(QDataTerm<?> target) {
for(QCompilationUnit childUnit: getCompilationUnit().getChildCompilationUnits()) {
QDataTerm<?> childDataTerm = childUnit.getDataTerm(target.getName(), true);
if(childDataTerm != null)
return childDataTerm;
}
return null;
}
// TODO move this method in QCompilationUnit
private QDataTerm<?> getFromFiles(QDataTerm<?> target) {
QCallableUnit callableUnit = (QCallableUnit) getCompilationUnit().getNode();
QFileSection fileSection = callableUnit.getFileSection();
if(fileSection == null)
return null;
for(QDisplayTerm displayTerm: fileSection.getDisplays()) {
for(QDataTerm<?> displayElement: displayTerm.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), displayElement.getName()))
return displayElement;
}
}
for(QDataSetTerm dataSet: fileSection.getDataSets()) {
for(QDataTerm<?> dataSetElement: dataSet.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), dataSetElement.getName()))
return dataSetElement;
}
}
for(QPrintTerm printTerm: fileSection.getPrinters()) {
for(QDataTerm<?> printElement: printTerm.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), printElement.getName()))
return printElement;
}
}
return null;
}
@SuppressWarnings("unchecked")
private void completeDefinition(QDataTerm<?> source, QDataTerm<?> target) {
QCompilerLinker compilerLinker = source.getFacet(QCompilerLinker.class);
if (compilerLinker != null && target.getFacet(QExternalFile.class) == null)
target.getFacets().add((QFacet) EcoreUtil.copy((EObject) compilerLinker));
else if (compilerLinker != null)
throw new RuntimeException("Unexpected condition: 4m8x7t8764xm04w70");
switch (target.getDataTermType()) {
case UNARY_ATOMIC:
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
appendDefinition(source.getDefinition(), target);
break;
case UNARY_COMPOUND:
QCharacterDef charDefTo = (QCharacterDef) target.getDefinition();
QDataStructDef dataStructDefFrom = (QDataStructDef) source.getDefinition();
if (containsLike(dataStructDefFrom))
visit(source);
setLength(charDefTo, dataStructDefFrom);
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicDataDef = (QMultipleAtomicDataDef<?>) source.getDefinition();
appendDefinition(multipleAtomicDataDef.getArgument(), target);
break;
case MULTIPLE_COMPOUND:
charDefTo = (QCharacterDef) target.getDefinition();
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
if (containsLike(strollerDef))
visit(source);
setLength(charDefTo, strollerDef);
appendDefinition(strollerDef, target);
break;
}
break;
case UNARY_COMPOUND:
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
case MULTIPLE_ATOMIC:
throw new DevelopmentKitCompilerRuntimeException("Invalid like atomic to compound: " + source.getName() + "->" + target.getName());
case UNARY_COMPOUND:
QCompoundDataDef<?, ?> compoundDataDefTarget = (QCompoundDataDef<?, ?>) target.getDefinition();
if (!compoundDataDefTarget.getElements().isEmpty())
throw new DevelopmentKitCompilerRuntimeException("Invalid compound to compound: " + source.getName() + "->" + target.getName());
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), compoundDataDefTarget);
appendDefinition(source.getDefinition(), target);
compoundDataDefTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
case MULTIPLE_COMPOUND:
((QCompoundDataDef<?, ?>) target.getDefinition()).setQualified(true);
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), (QCompoundDataDef<?, QDataTerm<?>>) target.getDefinition());
appendDefinition(strollerDef, target);
compilerLinker = target.getFacet(QCompilerLinker.class);
strollerDef.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
}
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicTarget = (QMultipleAtomicDataDef<?>) target.getDefinition();
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
appendDefinitionToMultiple(source.getDefinition(), (QDataTerm<QMultipleAtomicDataDef<?>>) target);
break;
case UNARY_COMPOUND:
QCharacterDef charDefTo = (QCharacterDef) multipleAtomicTarget.getArgument();
QDataStructDef dataStructDefFrom = (QDataStructDef) source.getDefinition();
if (containsLike(dataStructDefFrom))
visit(source);
setLength(charDefTo, dataStructDefFrom);
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicSource = (QMultipleAtomicDataDef<?>) source.getDefinition();
appendDefinitionToMultiple(multipleAtomicSource.getArgument(), (QDataTerm<QMultipleAtomicDataDef<?>>) target);
break;
case MULTIPLE_COMPOUND:
charDefTo = (QCharacterDef) multipleAtomicTarget.getArgument();
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
if (containsLike(strollerDef))
visit(source);
setLength(charDefTo, strollerDef);
appendDefinition(strollerDef, target.getDefinition());
break;
}
break;
case MULTIPLE_COMPOUND:
QMultipleCompoundDataDef<?, ?> multipleCompoundTarget = (QMultipleCompoundDataDef<?, ?>) target.getDefinition();
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
case MULTIPLE_ATOMIC:
throw new DevelopmentKitCompilerRuntimeException("Invalid like atomic to compound: " + source.getName() + "->" + target.getName());
case UNARY_COMPOUND:
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), multipleCompoundTarget);
appendDefinition(source.getDefinition(), target.getDefinition());
multipleCompoundTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
case MULTIPLE_COMPOUND:
QMultipleCompoundDataDef<?, ?> multipleCompoundSource = (QMultipleCompoundDataDef<?, ?>) source.getDefinition();
copyCompoundDataDef(multipleCompoundSource, multipleCompoundTarget);
appendDefinition(source.getDefinition(), target.getDefinition());
multipleCompoundTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
}
break;
}
}
private boolean containsLike(QDataStructDef structDef) {
boolean result = false;
for (QDataTerm<QBufferedDataDef<?>> element : structDef.getElements()) {
if (element.getLike() != null) {
result = true;
break;
}
}
return result;
}
private boolean containsLike(QStrollerDef<?> strollerDef) {
boolean result = false;
for (QDataTerm<QBufferedDataDef<?>> element : strollerDef.getElements()) {
if (element.getLike() != null) {
result = true;
break;
}
}
return result;
}
@Override
public RPJAbstractDataRefactor copy() {
return new RPJDataLikeRefactor(getCompilationUnit());
}
} | org.smeup.sys.dk.compiler.rpj/src/org/smeup/sys/dk/compiler/rpj/RPJDataLikeRefactor.java | /**
* Copyright (c) 2012, 2016 Sme.UP and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
*
* Contributors:
* Mattia Rocchi - Initial API and implementation
* Giuliano Giancristofaro - Implementation
*/
package org.smeup.sys.dk.compiler.rpj;
import javax.inject.Inject;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.smeup.sys.dk.compiler.DevelopmentKitCompilerRuntimeException;
import org.smeup.sys.dk.compiler.QCompilationUnit;
import org.smeup.sys.dk.compiler.QCompilerLinker;
import org.smeup.sys.dk.compiler.QDevelopmentKitCompilerFactory;
import org.smeup.sys.il.core.meta.QFacet;
import org.smeup.sys.il.data.def.QBufferDef;
import org.smeup.sys.il.data.def.QBufferedDataDef;
import org.smeup.sys.il.data.def.QCharacterDef;
import org.smeup.sys.il.data.def.QCompoundDataDef;
import org.smeup.sys.il.data.def.QDataStructDef;
import org.smeup.sys.il.data.def.QMultipleAtomicDataDef;
import org.smeup.sys.il.data.def.QMultipleCompoundDataDef;
import org.smeup.sys.il.data.def.QNumericDef;
import org.smeup.sys.il.data.def.QStrollerDef;
import org.smeup.sys.il.data.term.QDataTerm;
import org.smeup.sys.il.esam.QDataSetTerm;
import org.smeup.sys.il.esam.QDisplayTerm;
import org.smeup.sys.il.esam.QPrintTerm;
import org.smeup.sys.il.expr.IntegratedLanguageExpressionRuntimeException;
import org.smeup.sys.il.flow.QCallableUnit;
import org.smeup.sys.il.flow.QFileSection;
import org.smeup.sys.il.flow.QPrototype;
import org.smeup.sys.os.file.QExternalFile;
public class RPJDataLikeRefactor extends RPJAbstractDataRefactor {
@Inject
public RPJDataLikeRefactor(QCompilationUnit compilationUnit) {
super(compilationUnit);
}
@Override
public boolean visit(QDataTerm<?> target) {
// TODO
if(target.getDataTermType() == null)
return super.visit(target);
if (target.getLike() == null) {
switch (target.getDataTermType()) {
case MULTIPLE_ATOMIC:
case MULTIPLE_COMPOUND:
case UNARY_COMPOUND:
return super.visit(target);
case UNARY_ATOMIC:
if(target instanceof QPrototype)
break;
// search file definition
if(target.getDefinition() instanceof QBufferedDataDef<?>) {
QBufferedDataDef<?> bufferedDataDef = (QBufferedDataDef<?>) target.getDefinition();
if(bufferedDataDef.getLength() == 0) {
if(bufferedDataDef instanceof QBufferDef)
break;
if(bufferedDataDef instanceof QNumericDef)
break;
QDataTerm<?> displayTerm = getFromFiles(target);
if(displayTerm != null) {
completeDefinition(displayTerm, target);
return false;
}
}
}
}
return super.visit(target);
}
String like = target.getLike();
if (like.toUpperCase().startsWith("*IN") && !like.equalsIgnoreCase("*IN"))
like = "*INKA";
QDataTerm<?> source = getCompilationUnit().getDataTerm(like, true);
if (source == null)
throw new IntegratedLanguageExpressionRuntimeException("Invalid liked data term: " + target.getLike());
if (source.getLike() != null)
visit(source);
completeDefinition(source, target);
target.setLike(null);
return false;
}
private QDataTerm<?> getFromFiles(QDataTerm<?> target) {
QCallableUnit callableUnit = (QCallableUnit) getCompilationUnit().getNode();
QFileSection fileSection = callableUnit.getFileSection();
if(fileSection == null)
return null;
for(QDisplayTerm displayTerm: fileSection.getDisplays()) {
for(QDataTerm<?> displayElement: displayTerm.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), displayElement.getName()))
return displayElement;
}
}
for(QDataSetTerm dataSet: fileSection.getDataSets()) {
for(QDataTerm<?> dataSetElement: dataSet.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), dataSetElement.getName()))
return dataSetElement;
}
}
for(QPrintTerm printTerm: fileSection.getPrinters()) {
for(QDataTerm<?> printElement: printTerm.getFormat().getDefinition().getElements()) {
if(getCompilationUnit().equalsTermName(target.getName(), printElement.getName()))
return printElement;
}
}
return null;
}
@SuppressWarnings("unchecked")
private void completeDefinition(QDataTerm<?> source, QDataTerm<?> target) {
QCompilerLinker compilerLinker = source.getFacet(QCompilerLinker.class);
if (compilerLinker != null && target.getFacet(QExternalFile.class) == null)
target.getFacets().add((QFacet) EcoreUtil.copy((EObject) compilerLinker));
else if (compilerLinker != null)
throw new RuntimeException("Unexpected condition: 4m8x7t8764xm04w70");
switch (target.getDataTermType()) {
case UNARY_ATOMIC:
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
appendDefinition(source.getDefinition(), target);
break;
case UNARY_COMPOUND:
QCharacterDef charDefTo = (QCharacterDef) target.getDefinition();
QDataStructDef dataStructDefFrom = (QDataStructDef) source.getDefinition();
if (containsLike(dataStructDefFrom))
visit(source);
setLength(charDefTo, dataStructDefFrom);
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicDataDef = (QMultipleAtomicDataDef<?>) source.getDefinition();
appendDefinition(multipleAtomicDataDef.getArgument(), target);
break;
case MULTIPLE_COMPOUND:
charDefTo = (QCharacterDef) target.getDefinition();
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
if (containsLike(strollerDef))
visit(source);
setLength(charDefTo, strollerDef);
appendDefinition(strollerDef, target);
break;
}
break;
case UNARY_COMPOUND:
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
case MULTIPLE_ATOMIC:
throw new DevelopmentKitCompilerRuntimeException("Invalid like atomic to compound: " + source.getName() + "->" + target.getName());
case UNARY_COMPOUND:
QCompoundDataDef<?, ?> compoundDataDefTarget = (QCompoundDataDef<?, ?>) target.getDefinition();
if (!compoundDataDefTarget.getElements().isEmpty())
throw new DevelopmentKitCompilerRuntimeException("Invalid compound to compound: " + source.getName() + "->" + target.getName());
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), compoundDataDefTarget);
appendDefinition(source.getDefinition(), target);
compoundDataDefTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
case MULTIPLE_COMPOUND:
((QCompoundDataDef<?, ?>) target.getDefinition()).setQualified(true);
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), (QCompoundDataDef<?, QDataTerm<?>>) target.getDefinition());
appendDefinition(strollerDef, target);
compilerLinker = target.getFacet(QCompilerLinker.class);
strollerDef.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
}
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicTarget = (QMultipleAtomicDataDef<?>) target.getDefinition();
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
appendDefinitionToMultiple(source.getDefinition(), (QDataTerm<QMultipleAtomicDataDef<?>>) target);
break;
case UNARY_COMPOUND:
QCharacterDef charDefTo = (QCharacterDef) multipleAtomicTarget.getArgument();
QDataStructDef dataStructDefFrom = (QDataStructDef) source.getDefinition();
if (containsLike(dataStructDefFrom))
visit(source);
setLength(charDefTo, dataStructDefFrom);
break;
case MULTIPLE_ATOMIC:
QMultipleAtomicDataDef<?> multipleAtomicSource = (QMultipleAtomicDataDef<?>) source.getDefinition();
appendDefinitionToMultiple(multipleAtomicSource.getArgument(), (QDataTerm<QMultipleAtomicDataDef<?>>) target);
break;
case MULTIPLE_COMPOUND:
charDefTo = (QCharacterDef) multipleAtomicTarget.getArgument();
QStrollerDef<?> strollerDef = (QStrollerDef<?>) source.getDefinition();
if (containsLike(strollerDef))
visit(source);
setLength(charDefTo, strollerDef);
appendDefinition(strollerDef, target.getDefinition());
break;
}
break;
case MULTIPLE_COMPOUND:
QMultipleCompoundDataDef<?, ?> multipleCompoundTarget = (QMultipleCompoundDataDef<?, ?>) target.getDefinition();
switch (source.getDataTermType()) {
case UNARY_ATOMIC:
case MULTIPLE_ATOMIC:
throw new DevelopmentKitCompilerRuntimeException("Invalid like atomic to compound: " + source.getName() + "->" + target.getName());
case UNARY_COMPOUND:
copyCompoundDataDef((QCompoundDataDef<?, ?>) source.getDefinition(), multipleCompoundTarget);
appendDefinition(source.getDefinition(), target.getDefinition());
multipleCompoundTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
case MULTIPLE_COMPOUND:
QMultipleCompoundDataDef<?, ?> multipleCompoundSource = (QMultipleCompoundDataDef<?, ?>) source.getDefinition();
copyCompoundDataDef(multipleCompoundSource, multipleCompoundTarget);
appendDefinition(source.getDefinition(), target.getDefinition());
multipleCompoundTarget.setQualified(true);
if (compilerLinker == null) {
compilerLinker = QDevelopmentKitCompilerFactory.eINSTANCE.createCompilerLinker();
compilerLinker.setLinkedTermName(source.getName());
target.getFacets().add(compilerLinker);
}
break;
}
break;
}
}
private boolean containsLike(QDataStructDef structDef) {
boolean result = false;
for (QDataTerm<QBufferedDataDef<?>> element : structDef.getElements()) {
if (element.getLike() != null) {
result = true;
break;
}
}
return result;
}
private boolean containsLike(QStrollerDef<?> strollerDef) {
boolean result = false;
for (QDataTerm<QBufferedDataDef<?>> element : strollerDef.getElements()) {
if (element.getLike() != null) {
result = true;
break;
}
}
return result;
}
@Override
public RPJAbstractDataRefactor copy() {
return new RPJDataLikeRefactor(getCompilationUnit());
}
} | Patch for retrieving definitions from modules | org.smeup.sys.dk.compiler.rpj/src/org/smeup/sys/dk/compiler/rpj/RPJDataLikeRefactor.java | Patch for retrieving definitions from modules | <ide><path>org.smeup.sys.dk.compiler.rpj/src/org/smeup/sys/dk/compiler/rpj/RPJDataLikeRefactor.java
<ide> if(bufferedDataDef instanceof QNumericDef)
<ide> break;
<ide>
<add> // from files
<ide> QDataTerm<?> displayTerm = getFromFiles(target);
<add> if(displayTerm == null)
<add> displayTerm = getFromModules(target);
<add>
<ide> if(displayTerm != null) {
<ide> completeDefinition(displayTerm, target);
<ide> return false;
<ide> return false;
<ide> }
<ide>
<add> // TODO move this method in QCompilationUnit
<add> private QDataTerm<?> getFromModules(QDataTerm<?> target) {
<add>
<add> for(QCompilationUnit childUnit: getCompilationUnit().getChildCompilationUnits()) {
<add> QDataTerm<?> childDataTerm = childUnit.getDataTerm(target.getName(), true);
<add> if(childDataTerm != null)
<add> return childDataTerm;
<add> }
<add>
<add> return null;
<add> }
<add>
<add> // TODO move this method in QCompilationUnit
<ide> private QDataTerm<?> getFromFiles(QDataTerm<?> target) {
<ide>
<ide> QCallableUnit callableUnit = (QCallableUnit) getCompilationUnit().getNode(); |
|
Java | apache-2.0 | a0197b4472fd8aa368324500a8024dc925288018 | 0 | fusepool/datalifecycle | package eu.fusepool.datalifecycle;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.security.AccessController;
import java.security.AllPermission;
import java.security.Permission;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.clerezza.jaxrs.utils.RedirectUtil;
import org.apache.clerezza.jaxrs.utils.TrailingSlash;
import org.apache.clerezza.rdf.core.MGraph;
import org.apache.clerezza.rdf.core.NonLiteral;
import org.apache.clerezza.rdf.core.Resource;
import org.apache.clerezza.rdf.core.Triple;
import org.apache.clerezza.rdf.core.TripleCollection;
import org.apache.clerezza.rdf.core.UriRef;
import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException;
import org.apache.clerezza.rdf.core.access.LockableMGraph;
import org.apache.clerezza.rdf.core.access.TcManager;
import org.apache.clerezza.rdf.core.access.security.TcAccessController;
import org.apache.clerezza.rdf.core.access.security.TcPermission;
import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
import org.apache.clerezza.rdf.core.impl.TripleImpl;
import org.apache.clerezza.rdf.core.serializedform.Parser;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.ontologies.DCTERMS;
import org.apache.clerezza.rdf.ontologies.OWL;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.clerezza.rdf.ontologies.RDFS;
import org.apache.clerezza.rdf.utils.GraphNode;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
import org.apache.stanbol.commons.web.viewable.RdfViewable;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is the controller class of the fusepool data life cycle component. The main functionalities provided are
* 1) XML2RDF transformation
* 2) Indexing and Information Extraction
* 3) Reconciliation/Interlinking
* 4) Smushing
*/
@Component(immediate = true, metatype = true,
policy = ConfigurationPolicy.OPTIONAL)
@Properties( value={
@Property(name = "javax.ws.rs", boolValue = true),
@Property(name=Constants.SERVICE_RANKING,intValue=SourcingAdmin.DEFAULT_SERVICE_RANKING),
@Property(name=SourcingAdmin.BASE_URI_NAME, value="", description=SourcingAdmin.BASE_URI_DESCRIPTION)
})
@Service(Object.class)
@Path("sourcing")
public class SourcingAdmin {
public static final int DEFAULT_SERVICE_RANKING = 101;
// URI for rewriting from urn scheme to http
// base uri service property name
public static final String BASE_URI_NAME = "base.uri";
// base uri updated at service activation from the service property in the osgi console
private String baseUri;
public static final String BASE_URI_DESCRIPTION = "Base URI to be used when publishing data.";
/**
* Using slf4j for normal logging
*/
private static final Logger log = LoggerFactory.getLogger(SourcingAdmin.class);
BundleContext bundleCtx = null;
@Reference
private Parser parser;
@Reference
private Serializer serializer;
/**
* This service allows accessing and creating persistent triple collections
*/
@Reference
private TcManager tcManager;
@Reference
private Interlinker interlinker;
// text extractor references (RdfDigester implementations)
ServiceReference[] digestersRefs = null;
//@Reference(target="(extractorType=patent)")
//private RdfDigester patentDigester;
//@Reference(target="(extractorType=pubmed)")
//private RdfDigester pubmedDigester;
/**
* This is the name of the graph in which we "log" the requests
*/
//private UriRef REQUEST_LOG_GRAPH_NAME = new UriRef("http://example.org/resource-resolver-log.graph");
/**
* Name of the data life cycle graph. It is used as a register of other
* graphs to manage their life cycle
*/
public static final UriRef DATA_LIFECYCLE_GRAPH_REFERENCE = new UriRef("urn:x-localinstance:/dlc/meta.graph");
/**
* Name of the content graph, into which the smushed data is copied when published.
*/
private final String CONTENT_GRAPH_NAME = "urn:x-localinstance:/content.graph";
private UriRef CONTENT_GRAPH_REF = new UriRef(CONTENT_GRAPH_NAME);
// Operation codes
private final int RDFIZE = 1;
private final int ADD_TRIPLES_OPERATION = 2;
private final int TEXT_EXTRACTION = 3;
private final int RECONCILE_GRAPH_OPERATION = 4;
private final int SMUSH_GRAPH_OPERATION = 5;
private final int PUBLISH_DATA = 6;
// RDFdigester
//private final String PUBMED_RDFDIGESTER = "pubmed";
//private final String PATENT_RDFDIGESTER = "patent";
// RDFizer
private final String PUBMED_RDFIZER = "pubmed";
private final String PATENT_RDFIZER = "patent";
/**
* For each rdf triple collection uploaded 5 graphs are created.
* 1) a source graph to store the rdf data
* 2) an enhancements graph to store the text extracted for indexing and the
* entities extracted from the text by NLP engines in the default enhancement chain
* 3) a graph to store the result of the interlinking task
* 4) a graph to store the smushed graph
* 5) a graph to store the published graph i.e. the smushed graph in a coherent state with data in the content graph
* The name convention for these graphs is
* GRAPH_URN_PREFIX + timestamp + SUFFIX
* where SUFFIX can be one of SOURCE_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX,
* INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX, PUBLISH_GRAPH_URN_SUFFIX
*/
// base graph uri
public static final String GRAPH_URN_PREFIX = "urn:x-localinstance:/dlc/";
// graph suffix
public static final String SOURCE_GRAPH_URN_SUFFIX = "/rdf.graph";
// enhancements graph suffix
public static final String ENHANCE_GRAPH_URN_SUFFIX = "/enhance.graph";
// interlink graph suffix
public static final String INTERLINK_GRAPH_URN_SUFFIX = "/interlink.graph";
// smushed graph suffix
public static final String SMUSH_GRAPH_URN_SUFFIX = "/smush.graph";
// published graph suffix
public static final String PUBLISH_GRAPH_URN_SUFFIX = "/publish.graph";
private UriRef pipeRef = null;
@SuppressWarnings("unchecked")
@Activate
protected void activate(ComponentContext context) {
log.info("The Sourcing Admin Service is being activated");
// Creates the data lifecycle graph if it doesn't exist. This graph contains references to graphs and linksets
// Get the value of the base uri from the service property set in the Felix console
Dictionary<String,Object> dict = context.getProperties() ;
Object baseUriObj = dict.get(BASE_URI_NAME) ;
baseUri = baseUriObj.toString();
// Get the bundle context, needed to look up the RdfDigester services
bundleCtx = context.getBundleContext();
// Get RDFDigesters references
try {
digestersRefs = bundleCtx.getServiceReferences(RdfDigester.class.getName(),"(digesterImpl=*)");
if (digestersRefs != null) {
for (ServiceReference digesterRef : digestersRefs) {
String digesterImpl = (String) digesterRef.getProperty("digesterImpl");
log.info("SourcingAdmin RDFDigester services available: " + digesterImpl);
}
}
}
catch (InvalidSyntaxException e) {
e.printStackTrace();
}
try {
createDlcGraph();
log.info("Created Data Lifecycle Register Graph. This graph will reference all graphs during their lifecycle");
} catch (EntityAlreadyExistsException ex) {
log.info("Data Lifecycle Graph already exists.");
}
}
@Deactivate
protected void deactivate(ComponentContext context) {
log.info("The Sourcing Admin Service is being deactivated");
}
/**
* This method returns an RdfViewable, that is, an RDF resource with
* associated presentational information.
*/
@GET
public RdfViewable serviceEntry(@Context final UriInfo uriInfo,
@QueryParam("url") final UriRef url,
@HeaderParam("user-agent") String userAgent) throws Exception {
//this makes sure we are not invoked with a trailing slash which would affect
//relative resolution of links (e.g. css)
TrailingSlash.enforcePresent(uriInfo);
final String resourcePath = uriInfo.getAbsolutePath().toString();
if (url != null) {
String query = url.toString();
log.info(query);
}
//The URI at which this service was accessed, this will be the
//central serviceUri in the response
final UriRef serviceUri = new UriRef(resourcePath);
//the in memory graph to which the triples for the response are added
final MGraph responseGraph = new IndexedMGraph();
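// copy the DLC meta graph into the response graph under a read lock so a consistent snapshot is returned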
Lock rl = getDlcGraph().getLock().readLock();
rl.lock();
try {
responseGraph.addAll(getDlcGraph());
}
finally {
rl.unlock();
}
// Add information about the available digester services
for (ServiceReference digesterRef : digestersRefs) {
String digesterImpl = (String) digesterRef.getProperty("digesterImpl");
responseGraph.add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.service, new UriRef("urn:x-temp:/" + digesterImpl)));
responseGraph.add(new TripleImpl(new UriRef("urn:x-temp:/" + digesterImpl), RDFS.label, new PlainLiteralImpl(digesterImpl)));
}
//This GraphNode represents the service within our result graph
final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, responseGraph);
//What we return is the GraphNode we created with a template path
return new RdfViewable("SourcingAdmin", node, SourcingAdmin.class);
}
private void setPipeRef(UriRef pipeRef) {
this.pipeRef = pipeRef;
}
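// Convenience accessors for the task graphs of the currently selected pipe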
private LockableMGraph getSourceGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX));
}
private LockableMGraph getEnhanceGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX));
}
private LockableMGraph getInterlinkGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX));
}
private LockableMGraph getSmushGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SMUSH_GRAPH_URN_SUFFIX));
}
private LockableMGraph getPublishGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + PUBLISH_GRAPH_URN_SUFFIX));
}
private LockableMGraph getContentGraph() {
return tcManager.getMGraph( CONTENT_GRAPH_REF );
}
/**
* Creates a new pipe, i.e. the set of graphs needed to manage a dataset through its life cycle.
*
* @param uriInfo
* @param pipeLabel
* @return
* @throws Exception
*/
@POST
@Path("create_pipe")
@Produces("text/plain")
public Response createPipeRequest(@Context final UriInfo uriInfo,
@FormParam("pipe_label") final String pipeLabel) throws Exception {
AccessController.checkPermission(new AllPermission());
//some simplistic (and too restrictive) validation
/*
try {
new URI(graphName);
} catch (URISyntaxException e) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Graphname is not a valid URI: " + e.getReason()).build();
}
if (!graphName.contains(":")) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Graphname is not a valid URI: No colon separating scheme").build();
}
*/
// Set up the pipe's graphs
AccessController.checkPermission(new AllPermission());
if (createPipe(pipeLabel)) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Cannot create pipe " + pipeLabel).build();
} else {
return RedirectUtil.createSeeOtherResponse("./", uriInfo);
}
}
/**
* Creates a new pipe with tasks and product graphs and adds its uri and a label to the data life cycle graph.
* A graph will contain the RDF data uploaded or sent by a transformation task
* that has to be processed (text extraction, NLP processing, reconciliation, smushing).
* The following graphs are created to store the results of the processing tasks
* enhance.graph
* interlink.graph
* smush.graph
* These graphs will be empty at the beginning.
*
*
* @return true if the pipe could not be created, false on success
*/
private boolean createPipe(String pipeLabel) {
boolean creationFailed = false;
try {
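// a timestamp is used to mint a unique URI for the pipe, its tasks and its graphs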
String timeStamp = String.valueOf(System.currentTimeMillis());
// create a pipe
UriRef pipeRef = new UriRef(GRAPH_URN_PREFIX + timeStamp);
getDlcGraph().add(new TripleImpl(pipeRef, RDF.type, Ontology.Pipe));
if(pipeLabel != null && ! "".equals(pipeLabel)) {
getDlcGraph().add(new TripleImpl(pipeRef, RDFS.label, new PlainLiteralImpl(pipeLabel)));
}
getDlcGraph().add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.pipe, pipeRef));
// create tasks
//rdf task
UriRef rdfTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/rdf");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, rdfTaskRef));
getDlcGraph().add(new TripleImpl(rdfTaskRef, RDF.type, Ontology.RdfTask));
// enhance task
UriRef enhanceTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/enhance");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, enhanceTaskRef));
getDlcGraph().add(new TripleImpl(enhanceTaskRef, RDF.type, Ontology.EnhanceTask));
// interlink task
UriRef interlinkTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/interlink");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, interlinkTaskRef));
getDlcGraph().add(new TripleImpl(interlinkTaskRef, RDF.type, Ontology.InterlinkTask));
// smush task
UriRef smushTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/smush");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, smushTaskRef));
getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.SmushTask));
// publish task
UriRef publishTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/publish");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, publishTaskRef));
getDlcGraph().add(new TripleImpl(publishTaskRef, RDF.type, Ontology.PublishTask));
// create the source graph for the dataset (result of transformation in RDF)
String sourceGraphName = GRAPH_URN_PREFIX + timeStamp + SOURCE_GRAPH_URN_SUFFIX;
UriRef sourceGraphRef = new UriRef(sourceGraphName);
tcManager.createMGraph(sourceGraphRef);
//GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
//dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef);
getDlcGraph().add(new TripleImpl(rdfTaskRef, Ontology.deliverable, sourceGraphRef));
getDlcGraph().add(new TripleImpl(sourceGraphRef, RDF.type, Ontology.voidDataset));
// create the graph to store text and enhancements
String enhancementsGraphName = GRAPH_URN_PREFIX + timeStamp + ENHANCE_GRAPH_URN_SUFFIX;
UriRef enhancementsGraphRef = new UriRef(enhancementsGraphName);
tcManager.createMGraph(enhancementsGraphRef);
getDlcGraph().add(new TripleImpl(enhanceTaskRef, Ontology.deliverable, enhancementsGraphRef));
getDlcGraph().add(new TripleImpl(enhancementsGraphRef, RDFS.label, new PlainLiteralImpl("Contains a sioc:content property with text " +
"for indexing and references to entities found in the text by NLP enhancement engines")));
// create the graph to store the result of the interlinking task
String interlinkGraphName = GRAPH_URN_PREFIX + timeStamp + INTERLINK_GRAPH_URN_SUFFIX;
UriRef interlinkGraphRef = new UriRef(interlinkGraphName);
tcManager.createMGraph(interlinkGraphRef);
getDlcGraph().add(new TripleImpl(interlinkTaskRef, Ontology.deliverable, interlinkGraphRef));
getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDF.type, Ontology.voidLinkset));
getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidSubjectsTarget, sourceGraphRef));
getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidLinkPredicate, OWL.sameAs));
getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDFS.label, new PlainLiteralImpl("Contains equivalence links")));
// create the graph to store the result of the smushing task
String smushGraphName = GRAPH_URN_PREFIX + timeStamp + SMUSH_GRAPH_URN_SUFFIX;
UriRef smushGraphRef = new UriRef(smushGraphName);
tcManager.createMGraph(smushGraphRef);
getDlcGraph().add(new TripleImpl(smushTaskRef, Ontology.deliverable, smushGraphRef));
// create the graph to store the result of the publishing task
String publishGraphName = GRAPH_URN_PREFIX + timeStamp + PUBLISH_GRAPH_URN_SUFFIX;
UriRef publishGraphRef = new UriRef(publishGraphName);
tcManager.createMGraph(publishGraphRef);
getDlcGraph().add(new TripleImpl(publishTaskRef, Ontology.deliverable, publishGraphRef));
setPipeRef(pipeRef);
}
catch (UnsupportedOperationException uoe) {
log.error("Error while creating a graph", uoe);
creationFailed = true;
}
return creationFailed;
}
/**
* Applies one of the following operations to a pipe's graphs: rdfize
* (operation code: 1) - add triples (operation code: 2) - extract text
* (operation code: 3) - reconcile (operation code: 4) - smush
* (operation code: 5) - publish (operation code: 6)
*/
@POST
@Path("operate")
@Produces("text/plain")
public String operateOnGraphCommand(@Context final UriInfo uriInfo,
@FormParam("pipe") final UriRef pipeRef,
@FormParam("operation_code") final int operationCode,
@FormParam("data_url") final URL dataUrl,
@FormParam("rdfizer") final String rdfizer,
@FormParam("rdfdigester") final String rdfdigester,
@HeaderParam("Content-Type") String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
// validate arguments and handle all the connection exceptions
return operateOnPipe(pipeRef, operationCode, dataUrl, rdfizer, rdfdigester, mediaType);
}
private String operateOnPipe(UriRef pipeRef,
int operationCode,
URL dataUrl,
String rdfizer,
String rdfdigester,
String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
String message = "";
if (pipeExists(pipeRef)) {
setPipeRef(pipeRef);
switch (operationCode) {
case ADD_TRIPLES_OPERATION:
message = addTriples(pipeRef, dataUrl, mediaType);
break;
case RECONCILE_GRAPH_OPERATION:
message = reconcile(pipeRef, null);
break;
case SMUSH_GRAPH_OPERATION:
message = smush(pipeRef);
break;
case TEXT_EXTRACTION:
message = extractTextFromRdf(pipeRef, rdfdigester);
break;
case RDFIZE:
message = transformXml(dataUrl, rdfizer);
break;
case PUBLISH_DATA:
message = publishData(pipeRef);
break;
}
} else {
message = "The pipe does not exist.";
}
return message;
}
private String transformXml(URL dataUrl, String rdfizer) {
String message = "";
if(PUBMED_RDFIZER.equals(rdfizer)){
message = transformPubMedXml(dataUrl);
}
else if (PATENT_RDFIZER.equals(rdfizer)) {
message = transformPatentXml(dataUrl);
}
return message;
}
private String transformPubMedXml(URL dataUrl) {
String message = "PubMed XML->RDF transformation to be implemented.";
return message;
}
private String transformPatentXml(URL dataUrl) {
String message = "Marec Patent XML->RDF transformation to be implemented";
return message;
}
/**
* Load RDF data into an existing graph from a URL (schemes: "file://" or "http://").
* The arguments to be passed are:
* 1) graph in which the RDF data must be stored
* 2) url of the dataset
* Text extraction for indexing and entity enhancement are performed afterwards as a separate operation.
*/
private String addTriples(UriRef pipeRef, URL dataUrl, String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
String message = "";
// look up the pipe's rdf graph to which add the data
UriRef graphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
// add the triples of the temporary graph into the graph selected by the user
if (isValidUrl(dataUrl)) {
MGraph updatedGraph = addTriplesCommand(graphRef, dataUrl, mediaType);
message = "Added " + updatedGraph.size() + " triples to " + graphRef.getUnicodeString() + "\n";
} else {
message = "The URL of the data is not a valid one.\n";
}
log.info(message);
return message;
}
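/**
* Fetches the data from the given URL, parses it into a temporary in-memory graph and,
* if the destination graph exists, adds the parsed triples to it. The temporary graph
* is returned so that callers can report how many triples were read.
*/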
private MGraph addTriplesCommand(UriRef graphRef, URL dataUrl, String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
MGraph graph = null;
URLConnection connection = dataUrl.openConnection();
connection.addRequestProperty("Accept", "application/rdf+xml; q=.9, text/turtle;q=1");
// create a temporary graph to store the data
SimpleMGraph tempGraph = new SimpleMGraph();
InputStream data = connection.getInputStream();
if (data != null) {
if (mediaType.equals("application/x-www-form-urlencoded")) {
mediaType = getContentTypeFromUrl(dataUrl);
}
parser.parse(tempGraph, data, mediaType);
// add the triples of the temporary graph into the graph selected by the user
if (graphExists(graphRef)) {
graph = tcManager.getMGraph(graphRef);
graph.addAll(tempGraph);
}
}
return tempGraph;
}
/**
* Removes all the triples from the graph
*
*/
private String emptyGraph(UriRef graphRef) {
// removes all the triples from the graph
MGraph graph = tcManager.getMGraph(graphRef);
graph.clear();
return "Graph " + graphRef.getUnicodeString() + " is now empty.";
}
/**
* Deletes a graph and removes the reference to it (and its label) from the DLC graph.
*
* @param graphRef
* @return
*/
private String deleteGraph(UriRef graphRef) {
tcManager.deleteTripleCollection(graphRef);
GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
//remove the relation with the data lifecycle graph and all the information (triples) about the deleted graph (label).
dlcGraphNode.deleteProperty(DCTERMS.hasPart, graphRef);
return "Graph " + graphRef.getUnicodeString() + " has been deleted.";
}
/**
* Reconciles a graph with a target graph. The result of the reconciliation is an equivalence set
* stored in the interlink graph of the pipe. The graph used as source is the source rdf graph
* @param sourceGraphRef the URI of the referenced graph, i.e. the graph for which the reconciliation should be performed.
* @param targetGraphRef the URI of the target graph. If null the target graph is the same as the source graph.
* @return
* @throws Exception
*/
private String reconcile(UriRef pipeRef, UriRef targetGraphRef) throws Exception {
String message = "";
// Identifier of the link rules within the Silk config file
String linkSpecId = "agents";
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
if (graphExists(sourceGraphRef)) {
//if target graph is not provided the reconciliation will be done against the source graph itself
if(targetGraphRef == null){
targetGraphRef = sourceGraphRef;
}
// reconcile the source graph with the target graph
UriRef interlinkGraphRef = reconcileCommand(pipeRef, sourceGraphRef, targetGraphRef, linkSpecId);
TripleCollection interlinkGraph = tcManager.getMGraph(interlinkGraphRef);
if (interlinkGraph.size() > 0) {
message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n"
+ interlinkGraph.size() + " owl:sameAs statements have been created and stored in " + interlinkGraphRef.getUnicodeString();
}
else {
message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n"
+ "No equivalent entities have been found.";
}
}
else {
message = "The source graph does not exist.";
}
log.info(message);
return message;
}
private UriRef reconcileCommand(UriRef pipeRef, UriRef sourceGraphRef, UriRef targetGraphRef, String linkSpecId) throws Exception {
TripleCollection owlSameAs = null;
// get the pipe's interlink graph to store the result of the reconciliation task
UriRef interlinkGraphRef = new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX);
if (graphExists(sourceGraphRef)) {
// Get the source graph from the triple store
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
// Copy the graph
MGraph copySourceGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
copySourceGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
// reconcile the source graph with the target graph
owlSameAs = interlinker.interlink(copySourceGraph, targetGraphRef, linkSpecId);
if (owlSameAs.size() > 0) {
LockableMGraph sameAsGraph = tcManager.getMGraph(interlinkGraphRef);
sameAsGraph.addAll(owlSameAs);
// log the result (the equivalence set should be serialized and stored)
Lock l = sameAsGraph.getLock().readLock();
l.lock();
try {
Iterator<Triple> isameas = owlSameAs.iterator();
while (isameas.hasNext()) {
Triple t = isameas.next();
NonLiteral s = t.getSubject();
UriRef p = t.getPredicate();
Resource o = t.getObject();
log.info(s.toString() + p.getUnicodeString() + o.toString() + " .\n");
}
}
finally {
l.unlock();
}
// add a reference of the equivalence set to the source graph
getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidSubjectsTarget, sourceGraphRef));
// add a reference of the equivalence set to the target graph
getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidObjectsTarget, targetGraphRef));
}
}
return interlinkGraphRef;
}
/**
* Smush the enhanced graph using the interlinking graph. More precisely collates URIs coming
* from different equivalent resources in a single one chosen among them. The triples in the
* source graph are copied in the smush graph that is then smushed using the interlinking
* graph.
* @param graphToSmushRef
* @return
*/
private String smush(UriRef pipeRef) {
String message = "Smushing task.\n";
// As the smush.graph must be published, it has to contain the sioc:content property and all the subjects
// extracted during the text extraction phase. These are stored in the enhance.graph together with all the
// triples from the source RDF, so it is the enhance.graph that gets smushed.
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
if(getInterlinkGraph().size() > 0 && getEnhanceGraph().size() > 0) {
LockableMGraph smushedGraph = smushCommand(enhanceGraphRef, getInterlinkGraph());
message = "Smushing of " + enhanceGraphRef.getUnicodeString()
+ " with equivalence set completed. "
+ "Smushed graph size = " + smushedGraph.size() + "\n";
}
else {
message = "No equivalence links available for " + enhanceGraphRef.getUnicodeString() + "\n"
+ "or the enhancement graph is empty.\n"
+ "The smushing task is applied to the enhancement graph using the equivalence set in the interlinking graph.";
}
return message;
}
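/**
* Copies the enhancement graph into the smush graph and smushes it in place using the
* given equivalence set (owl:sameAs statements), canonicalizing the URIs.
*/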
private LockableMGraph smushCommand(UriRef enhanceGraphRef, LockableMGraph equivalenceSet) {
if(getSmushGraph().size() > 0) {
getSmushGraph().clear();
}
Lock rl = getEnhanceGraph().getLock().readLock();
rl.lock();
try {
// add triples from enhance graph to smush graph
getSmushGraph().addAll(getEnhanceGraph());
log.info("Copied " + getEnhanceGraph().size() + " triples from the enhancement graph into the smush graph.");
SimpleMGraph tempEquivalenceSet = new SimpleMGraph();
tempEquivalenceSet.addAll(equivalenceSet);
// smush and canonicalize uris
IriSmusher smusher = new CanonicalizingSameAsSmusher();
log.info("Smush task started.");
smusher.smush(getSmushGraph(), tempEquivalenceSet, true);
log.info("Smush task completed.");
}
finally {
rl.unlock();
}
//serializer.serialize(System.out, getSmushGraph(), SupportedFormat.RDF_XML);
return getSmushGraph();
}
/**
* Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
* property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
* will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime
* the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
* and a rdfs:label if available, are added to the patent resource using dcterms:subject property.
* @param pipeRef
* @return
*/
private String extractTextFromRdf(UriRef pipeRef, String selectedDigester){
String message = "";
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
SimpleMGraph tempGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
tempGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
enhanceGraph.addAll(tempGraph);
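// hand the enhancement graph to the selected digester, which adds sioc:content and dcterms:subject statements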
for (ServiceReference digesterRef : digestersRefs) {
String digesterType = (String) digesterRef.getProperty("digesterImpl");
if(selectedDigester.equals(digesterType)) {
RdfDigester digester = (RdfDigester) bundleCtx.getService(digesterRef);
digester.extractText(enhanceGraph);
message += "Extracted text from " + enhanceGraphRef.getUnicodeString() + " by " + digesterType + " digester";
}
}
return message;
}
/**
* Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
* property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
* will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime
* the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
* and a rdfs:label if available, are added to the patent resource using dcterms:subject property.
* @param pipeRef
* @return
*/
/*
private String extractTextFromPatent(UriRef pipeRef){
String message = "Extracts text from patents and adds a sioc:content property.\n";
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
SimpleMGraph tempGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
tempGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
enhanceGraph.addAll(tempGraph);
patentDigester.extractText(enhanceGraph);
message += "Extracted text from " + enhanceGraphRef.getUnicodeString();
return message;
}
*/
/**
* Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
* property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
* will be related to a PubMed article (resource of type bibo:Document) so that the article will be retrieved any time
* the keywords are searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
* and a rdfs:label if available, are added to the article resource using dcterms:subject property.
* @param pipeRef
* @return
*/
/*
private String extractTextFromPubMed(UriRef pipeRef){
String message = "Extract text from PubMed articles and adding a sioc:content property.\n";
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
SimpleMGraph tempGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
tempGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
enhanceGraph.addAll(tempGraph);
pubmedDigester.extractText(enhanceGraph);
message += "Extracted text from " + enhanceGraphRef.getUnicodeString();
return message;
}
*/
/**
* Moves data from smush.graph to content.graph. The triples (facts) in the two graphs must be coherent, i.e. the same.
* Before publishing the current smushed data must be compared with the last published data. New triples
* in the smushed graph not in the published graph must be added while triples in the published graph absent
* in the smushed graph must be removed. The algorithm is as follows
* 1) make all URIs in smush.graph http dereferencable (uri canonicalization)
* 2) find triples in smush.graph not in publish.graph (new triples)
* 3) find triples in publish.graph not in smush.graph (old triples)
* 4) add new triples to content.graph
* 5) remove old triples from content.graph
* 6) delete all triples in publish.graph
* 7) copy triples from smush.graph to publish.graph
*/
private String publishData(UriRef pipeRef) {
String message = "";
// add these triples to the content.graph
MGraph triplesToAdd = new SimpleMGraph();
// remove these triples from the content.graph
MGraph triplesToRemove = new SimpleMGraph();
// make all URIs in smush graph dereferencable
canonicalizeResources(getSmushGraph());
// triples to add to the content.graph
Lock ls = getSmushGraph().getLock().readLock();
ls.lock();
try {
Iterator<Triple> ismush = getSmushGraph().iterator();
while (ismush.hasNext()) {
Triple smushTriple = ismush.next();
if( ! getPublishGraph().contains(smushTriple) ) {
triplesToAdd.add(smushTriple);
}
}
}
finally {
ls.unlock();
}
// triples to remove from the content.graph
Lock lp = getPublishGraph().getLock().readLock();
lp.lock();
try {
Iterator<Triple> ipublish = getPublishGraph().iterator();
while (ipublish.hasNext()) {
Triple publishTriple = ipublish.next();
if( ! getSmushGraph().contains(publishTriple) ) {
triplesToRemove.add(publishTriple);
}
}
}
finally {
lp.unlock();
}
if(triplesToRemove.size() > 0) {
getContentGraph().removeAll(triplesToRemove);
log.info("Removed " + triplesToRemove.size() + " triples from " + CONTENT_GRAPH_REF.getUnicodeString());
}
else {
log.info("No triples to remove from " + CONTENT_GRAPH_REF.getUnicodeString());
}
if(triplesToAdd.size() > 0) {
getContentGraph().addAll(triplesToAdd);
log.info("Added " + triplesToAdd.size() + " triples to " + CONTENT_GRAPH_REF.getUnicodeString());
}
else {
log.info("No triples to add to " + CONTENT_GRAPH_REF.getUnicodeString());
}
getPublishGraph().clear();
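// keep a copy of the smushed data as the new publish.graph so that the next publication can be diffed against it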
Lock rl = getSmushGraph().getLock().readLock();
rl.lock();
try {
getPublishGraph().addAll( getSmushGraph() );
}
finally {
rl.unlock();
}
message = "Copied " + triplesToAdd.size() + " triples from " + pipeRef.getUnicodeString() + " to content-graph";
return message;
}
/**
* All the resources in the smush graph must be http dereferencable when published.
* All the triples in the smush graph are copied into a temporary graph. For each triple, a subject or object
* with a non-http URI is replaced with an http URI, and an equivalence link is added to the interlinking graph for each
* resource (subject and object) that has been changed.
*/
private void canonicalizeResources(LockableMGraph graph) {
MGraph graphCopy = new SimpleMGraph();
// graph containing the same triple with the http URI for each subject and object
MGraph canonicGraph = new SimpleMGraph();
Lock rl = graph.getLock().readLock();
rl.lock();
try {
graphCopy.addAll(graph);
}
finally {
rl.unlock();
}
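// rewrite each triple, replacing urn:x-temp subjects and objects with http URIs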
Iterator<Triple> ismushTriples = graphCopy.iterator();
while (ismushTriples.hasNext()) {
Triple triple = ismushTriples.next();
UriRef subject = (UriRef) triple.getSubject();
Resource object = triple.getObject();
Set<UriRef> singletonSetSubject = new HashSet<UriRef>();
Set<UriRef> singletonSetObject = new HashSet<UriRef>();
// generate an http URI for both subject and object and add an equivalence link into the interlinking graph
if( subject.getUnicodeString().startsWith("urn:x-temp:") ) {
singletonSetSubject.add( (UriRef) triple.getSubject() );
subject = generateNewHttpUri(singletonSetSubject);
}
// only URI objects can be canonicalized; UriRef.toString() wraps the URI in angle brackets, so getUnicodeString() is used
if( object instanceof UriRef && ((UriRef) object).getUnicodeString().startsWith("urn:x-temp:") ) {
singletonSetObject.add( (UriRef) triple.getObject() );
object = generateNewHttpUri(singletonSetObject);
}
// add the triple with the http uris to the canonic graph
canonicGraph.add(new TripleImpl(subject, triple.getPredicate(), object));
}
graph.clear();
graph.addAll(canonicGraph);
}
/**
* Validate URL
* A valid URL must start with file:/// or http://
*/
private boolean isValidUrl(URL url) {
boolean isValidUrl = false;
if(url != null) {
if( url.toString().startsWith("http://") || url.toString().startsWith("file:/")) {
isValidUrl = true;
}
}
return isValidUrl;
}
/**
* Extracts the content type from the file extension
*
* @param url
* @return
*/
private String getContentTypeFromUrl(URL url) {
String contentType = null;
if (url.getFile().endsWith("ttl")) {
contentType = "text/turtle";
} else if (url.getFile().endsWith("nt")) {
contentType = "text/turtle";
} else {
contentType = "application/rdf+xml";
}
return contentType;
}
/**
* Returns the data life cycle graph containing all the monitored graphs.
* The graph must have been created beforehand (see createDlcGraph).
*
* @return
*/
private LockableMGraph getDlcGraph() {
return tcManager.getMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE);
}
/**
* Checks if a graph exists and returns a boolean value.
*
* @param graph_ref
* @return
*/
private boolean graphExists(UriRef graph_ref) {
Set<UriRef> graphs = tcManager.listMGraphs();
Iterator<UriRef> igraphs = graphs.iterator();
while (igraphs.hasNext()) {
UriRef graphRef = igraphs.next();
if (graph_ref.toString().equals(graphRef.toString())) {
return true;
}
}
return false;
}
/**
* Checks whether a pipe exists
*/
private boolean pipeExists(UriRef pipeRef) {
boolean result = false;
if (pipeRef != null) {
// the pipe exists only if it has been registered as a Pipe in the DLC meta graph
Lock rl = getDlcGraph().getLock().readLock();
rl.lock();
try {
result = getDlcGraph().filter(pipeRef, RDF.type, Ontology.Pipe).hasNext();
}
finally {
rl.unlock();
}
}
return result;
}
/**
* Creates the data lifecycle graph. Must be called at the bundle
* activation if the graph doesn't exists yet.
*/
private MGraph createDlcGraph() {
MGraph dlcGraph = tcManager.createMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE);
TcAccessController tca = new TcAccessController(tcManager);
tca.setRequiredReadPermissions(DATA_LIFECYCLE_GRAPH_REFERENCE,
Collections.singleton((Permission) new TcPermission(
"urn:x-localinstance:/content.graph", "read")));
return dlcGraph;
}
/**
* Generates a new http URI that will be used as the canonical one in place
* of a set of equivalent non-http URIs. An owl:sameAs statement is added to
* the interlinking graph stating that the canonical http URI is equivalent
* to one of the non-http URI in the set of equivalent URIs.
* @param uriRefs
* @return
*/
private UriRef generateNewHttpUri(Set<UriRef> uriRefs) {
UriRef bestNonHttp = chooseBest(uriRefs);
String nonHttpString = bestNonHttp.getUnicodeString();
if (!nonHttpString.startsWith("urn:x-temp:")) {
throw new RuntimeException("Sorry we current assume all non-http "
+ "URIs to be canonicalized to be urn:x-temp");
}
String httpUriString = nonHttpString.replaceFirst("urn:x-temp:", baseUri);
UriRef httpUriRef = new UriRef(httpUriString);
// add an owl:sameAs statement in the interlinking graph
getInterlinkGraph().add(new TripleImpl(bestNonHttp, OWL.sameAs, httpUriRef));
return httpUriRef;
}
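/**
* Deterministically picks one URI out of a set of equivalent URIs: the one whose
* unicode string is lexicographically smallest.
*/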
private UriRef chooseBest(Set<UriRef> httpUri) {
Iterator<UriRef> iter = httpUri.iterator();
UriRef best = iter.next();
while (iter.hasNext()) {
UriRef next = iter.next();
if (next.getUnicodeString().compareTo(best.getUnicodeString()) < 0) {
best = next;
}
}
return best;
}
/**
* An inner class to canonicalize URIs from the urn to the http scheme. An http URI is chosen
* among the equivalent ones; if no http URI is available a new one is created.
*/
private class CanonicalizingSameAsSmusher extends IriSmusher {
@Override
protected UriRef getPreferedIri(Set<UriRef> uriRefs) {
Set<UriRef> httpUri = new HashSet<UriRef>();
for (UriRef uriRef : uriRefs) {
if (uriRef.getUnicodeString().startsWith("http")) {
httpUri.add(uriRef);
}
}
if (httpUri.size() == 1) {
return httpUri.iterator().next();
}
// There is no http URI in the set of equivalent resources. The entity was unknown.
// A new representation of the entity with http URI will be created.
if (httpUri.size() == 0) {
return generateNewHttpUri(uriRefs);
}
if (httpUri.size() > 1) {
return chooseBest(httpUri);
}
throw new Error("Negative size set.");
}
}
}
| src/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java | package eu.fusepool.datalifecycle;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.security.AccessController;
import java.security.AllPermission;
import java.security.Permission;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.clerezza.jaxrs.utils.RedirectUtil;
import org.apache.clerezza.jaxrs.utils.TrailingSlash;
import org.apache.clerezza.rdf.core.MGraph;
import org.apache.clerezza.rdf.core.NonLiteral;
import org.apache.clerezza.rdf.core.Resource;
import org.apache.clerezza.rdf.core.Triple;
import org.apache.clerezza.rdf.core.TripleCollection;
import org.apache.clerezza.rdf.core.UriRef;
import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException;
import org.apache.clerezza.rdf.core.access.LockableMGraph;
import org.apache.clerezza.rdf.core.access.TcManager;
import org.apache.clerezza.rdf.core.access.security.TcAccessController;
import org.apache.clerezza.rdf.core.access.security.TcPermission;
import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
import org.apache.clerezza.rdf.core.impl.TripleImpl;
import org.apache.clerezza.rdf.core.serializedform.Parser;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.ontologies.DCTERMS;
import org.apache.clerezza.rdf.ontologies.OWL;
import org.apache.clerezza.rdf.ontologies.RDF;
import org.apache.clerezza.rdf.ontologies.RDFS;
import org.apache.clerezza.rdf.utils.GraphNode;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.ConfigurationPolicy;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.stanbol.commons.web.viewable.RdfViewable;
import org.osgi.framework.Constants;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This is the controller class of the fusepool data life cycle component. The main functionalities provided are
* 1) XML2RDF transformation
* 2) Indexing and Information Extraction
* 3) Reconciliation/Interlinking
* 4) Smushing
*/
@Component(immediate = true, metatype = true,
policy = ConfigurationPolicy.OPTIONAL)
@Properties( value={
@Property(name = "javax.ws.rs", boolValue = true),
@Property(name=Constants.SERVICE_RANKING,intValue=SourcingAdmin.DEFAULT_SERVICE_RANKING),
@Property(name=SourcingAdmin.BASE_URI_NAME, value="", description=SourcingAdmin.BASE_URI_DESCRIPTION)
})
@Service(Object.class)
@Path("sourcing")
public class SourcingAdmin {
public static final int DEFAULT_SERVICE_RANKING = 101;
// URI for rewriting from urn scheme to http
// base uri service property name
public static final String BASE_URI_NAME = "base.uri";
// base uri updated at service activation from the service property in the osgi console
private String baseUri;
public static final String BASE_URI_DESCRIPTION = "Base URI to be used when publishing data.";
/**
* Using slf4j for normal logging
*/
private static final Logger log = LoggerFactory.getLogger(SourcingAdmin.class);
@Reference
private Parser parser;
@Reference
private Serializer serializer;
/**
* This service allows accessing and creating persistent triple collections
*/
@Reference
private TcManager tcManager;
@Reference
private Interlinker interlinker;
@Reference(target="(extractorType=patent)")
private RdfDigester patentDigester;
@Reference(target="(extractorType=pubmed)")
private RdfDigester pubmedDigester;
/**
* This is the name of the graph in which we "log" the requests
*/
//private UriRef REQUEST_LOG_GRAPH_NAME = new UriRef("http://example.org/resource-resolver-log.graph");
/**
* Name of the data life cycle graph. It is used as a register of other
* graphs to manage their life cycle
*/
public static final UriRef DATA_LIFECYCLE_GRAPH_REFERENCE = new UriRef("urn:x-localinstance:/dlc/meta.graph");
/**
* Register graph referencing graphs for life cycle monitoring;
*/
private final String CONTENT_GRAPH_NAME = "urn:x-localinstance:/content.graph";
private UriRef CONTENT_GRAPH_REF = new UriRef(CONTENT_GRAPH_NAME);
// Operation codes
private final int RDFIZE = 1;
private final int ADD_TRIPLES_OPERATION = 2;
private final int TEXT_EXTRACTION = 3;
private final int RECONCILE_GRAPH_OPERATION = 4;
private final int SMUSH_GRAPH_OPERATION = 5;
private final int PUBLISH_DATA = 6;
// RDFdigester
private final String PUBMED_RDFDIGESTER = "pubmed";
private final String PATENT_RDFDIGESTER = "patent";
// RDFizer
private final String PUBMED_RDFIZER = "pubmed";
private final String PATENT_RDFIZER = "patent";
/**
* For each rdf triple collection uploaded 5 graphs are created.
* 1) a source graph to store the rdf data
* 2) an enhancements graph to store the text extracted for indexing and the
* entities extracted from the text by NLP engines in the default enhancement chain
* 3) a graph to store the result of the interlinking task
* 4) a graph to store the smushed graph
* 5) a graph to store the published graph i.e. the smushed graph in a coherent state with data in the content graph
* The name convention for these graphs is
* GRAPH_URN_PREFIX + timestamp + SUFFIX
* where SUFFIX can be one of SOURCE_GRAPH_URN_SUFFIX, ENHANCE_GRAPH_URN_SUFFIX,
* INTERLINK_GRAPH_URN_SUFFIX, SMUSH_GRAPH_URN_SUFFIX, PUBLISH_GRAPH_URN_SUFFIX
*/
// base graph uri
public static final String GRAPH_URN_PREFIX = "urn:x-localinstance:/dlc/";
// graph suffix
public static final String SOURCE_GRAPH_URN_SUFFIX = "/rdf.graph";
// enhancements graph suffix
public static final String ENHANCE_GRAPH_URN_SUFFIX = "/enhance.graph";
// interlink graph suffix
public static final String INTERLINK_GRAPH_URN_SUFFIX = "/interlink.graph";
// smushed graph suffix
public static final String SMUSH_GRAPH_URN_SUFFIX = "/smush.graph";
// published graph suffix
public static final String PUBLISH_GRAPH_URN_SUFFIX = "/publish.graph";
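// Editor's note (illustrative, the timestamp value is hypothetical): for a pipe created at
// timestamp 1385040000000 the five graphs would be named
//   urn:x-localinstance:/dlc/1385040000000/rdf.graph
//   urn:x-localinstance:/dlc/1385040000000/enhance.graph
//   urn:x-localinstance:/dlc/1385040000000/interlink.graph
//   urn:x-localinstance:/dlc/1385040000000/smush.graph
//   urn:x-localinstance:/dlc/1385040000000/publish.graph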
private UriRef pipeRef = null;
@Activate
protected void activate(ComponentContext context) {
log.info("The Sourcing Admin Service is being activated");
// Creates the data lifecycle graph if it doesn't exist yet. This graph contains references to graphs and linksets
// Get the value of the base uri from the service property set in the Felix console
Dictionary<String,Object> dict = context.getProperties() ;
Object baseUriObj = dict.get(BASE_URI_NAME) ;
baseUri = baseUriObj.toString();
try {
createDlcGraph();
log.info("Created Data Lifecycle Register Graph. This graph will reference all graphs during their lifecycle");
} catch (EntityAlreadyExistsException ex) {
log.info("Data Lifecycle Graph already exists.");
}
}
@Deactivate
protected void deactivate(ComponentContext context) {
log.info("The Sourcing Admin Service is being deactivated");
}
/**
* This method returns an RdfViewable, i.e. an RDF serviceUri with
* associated presentational information.
*/
@GET
public RdfViewable serviceEntry(@Context final UriInfo uriInfo,
@QueryParam("url") final UriRef url,
@HeaderParam("user-agent") String userAgent) throws Exception {
//this maks sure we are nt invoked with a trailing slash which would affect
//relative resolution of links (e.g. css)
TrailingSlash.enforcePresent(uriInfo);
final String resourcePath = uriInfo.getAbsolutePath().toString();
if (url != null) {
String query = url.toString();
log.info(query);
}
//The URI at which this service was accessed, this will be the
//central serviceUri in the response
final UriRef serviceUri = new UriRef(resourcePath);
//the in memory graph to which the triples for the response are added
//final MGraph responseGraph = new IndexedMGraph();
//This GraphNode represents the service within our result graph
//final GraphNode node = new GraphNode(serviceUri, responseGraph);
//node.addProperty(Ontology.graph, new UriRef("http://fusepool.com/graphs/patentdata"));
//node.addPropertyValue(RDFS.label, "A graph of patent data");
//What we return is the GraphNode we created with a template path
final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
return new RdfViewable("SourcingAdmin", node, SourcingAdmin.class);
}
private void setPipeRef(UriRef pipeRef) {
this.pipeRef = pipeRef;
}
private LockableMGraph getSourceGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX));
}
private LockableMGraph getEnhanceGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX));
}
private LockableMGraph getInterlinkGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX));
}
private LockableMGraph getSmushGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + SMUSH_GRAPH_URN_SUFFIX));
}
private LockableMGraph getPublishGraph() {
return tcManager.getMGraph(new UriRef(pipeRef.getUnicodeString() + PUBLISH_GRAPH_URN_SUFFIX));
}
private LockableMGraph getContentGraph() {
return tcManager.getMGraph( CONTENT_GRAPH_REF );
}
/**
* Creates a new pipe with its (initially empty) task and product graphs.
*
* @param uriInfo
* @param pipeLabel
* @return
* @throws Exception
*/
@POST
@Path("create_pipe")
@Produces("text/plain")
public Response createPipeRequest(@Context final UriInfo uriInfo,
@FormParam("pipe_label") final String pipeLabel) throws Exception {
AccessController.checkPermission(new AllPermission());
//some simplistic (and too restrictive) validation
/*
try {
new URI(graphName);
} catch (URISyntaxException e) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Graphname is not a valid URI: " + e.getReason()).build();
}
if (!graphName.contains(":")) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Graphname is not a valid URI: No colon separating scheme").build();
}
*/
// Set up the pipe's graphs
AccessController.checkPermission(new AllPermission());
if (createPipe(pipeLabel)) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("Cannot create graph" + pipeLabel).build();
} else {
return RedirectUtil.createSeeOtherResponse("./", uriInfo);
}
}
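// Editor's note: a hedged usage sketch, not part of the original source; host and port are
// hypothetical, and the caller must be a user to whom the platform grants AllPermission
// (see the checkPermission call above). The pipe-creation endpoint can then be exercised with:
//   curl -X POST -d "pipe_label=My patent dataset" http://localhost:8080/sourcing/create_pipe
// On success the service redirects (303 See Other) back to ./ i.e. the sourcing page.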
/**
* Creates a new pipe with tasks and product graphs and adds its uri and a label to the data life cycle graph.
* A graph will contain the RDF data uploaded or sent by a transformation task
* that has to be processed (text extraction, NLP processing, reconciliation, smushing).
* The following graphs are created to store the results of the processing tasks
* enhance.graph
* interlink.graph
* smush.graph
* These graphs will be empty at the beginning.
*
*
* @return
*/
private boolean createPipe(String pipeLabel) {
boolean graphExists = false;
try {
String timeStamp = String.valueOf(System.currentTimeMillis());
// create a pipe
UriRef pipeRef = new UriRef(GRAPH_URN_PREFIX + timeStamp);
getDlcGraph().add(new TripleImpl(pipeRef, RDF.type, Ontology.Pipe));
if(pipeLabel != null & ! "".equals(pipeLabel)) {
getDlcGraph().add(new TripleImpl(pipeRef, RDFS.label, new PlainLiteralImpl(pipeLabel)));
}
getDlcGraph().add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.pipe, pipeRef));
// create tasks
//rdf task
UriRef rdfTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/rdf");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, rdfTaskRef));
getDlcGraph().add(new TripleImpl(rdfTaskRef, RDF.type, Ontology.RdfTask));
// enhance task
UriRef enhanceTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/enhance");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, enhanceTaskRef));
getDlcGraph().add(new TripleImpl(enhanceTaskRef, RDF.type, Ontology.EnhanceTask));
// interlink task
UriRef interlinkTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/interlink");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, interlinkTaskRef));
getDlcGraph().add(new TripleImpl(interlinkTaskRef, RDF.type, Ontology.InterlinkTask));
// smush task
UriRef smushTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/smush");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, smushTaskRef));
getDlcGraph().add(new TripleImpl(smushTaskRef, RDF.type, Ontology.SmushTask));
// publish task
UriRef publishTaskRef = new UriRef(GRAPH_URN_PREFIX + timeStamp + "/publish");
getDlcGraph().add(new TripleImpl(pipeRef, Ontology.creates, publishTaskRef));
getDlcGraph().add(new TripleImpl(publishTaskRef, RDF.type, Ontology.PublishTask));
// create the source graph for the dataset (result of transformation in RDF)
String sourceGraphName = GRAPH_URN_PREFIX + timeStamp + SOURCE_GRAPH_URN_SUFFIX;
UriRef sourceGraphRef = new UriRef(sourceGraphName);
tcManager.createMGraph(sourceGraphRef);
//GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
//dlcGraphNode.addProperty(DCTERMS.hasPart, graphRef);
getDlcGraph().add(new TripleImpl(rdfTaskRef, Ontology.deliverable, sourceGraphRef));
getDlcGraph().add(new TripleImpl(sourceGraphRef, RDF.type, Ontology.voidDataset));
// create the graph to store text and enhancements
String enhancementsGraphName = GRAPH_URN_PREFIX + timeStamp + ENHANCE_GRAPH_URN_SUFFIX;
UriRef enhancementsGraphRef = new UriRef(enhancementsGraphName);
tcManager.createMGraph(enhancementsGraphRef);
getDlcGraph().add(new TripleImpl(enhanceTaskRef, Ontology.deliverable, enhancementsGraphRef));
getDlcGraph().add(new TripleImpl(enhancementsGraphRef, RDFS.label, new PlainLiteralImpl("Contains a sioc:content property with text " +
"for indexing and references to entities found in the text by NLP enhancement engines")));
// create the graph to store the result of the interlinking task
String interlinkGraphName = GRAPH_URN_PREFIX + timeStamp + INTERLINK_GRAPH_URN_SUFFIX;
UriRef interlinkGraphRef = new UriRef(interlinkGraphName);
tcManager.createMGraph(interlinkGraphRef);
getDlcGraph().add(new TripleImpl(interlinkTaskRef, Ontology.deliverable, interlinkGraphRef));
getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDF.type, Ontology.voidLinkset));
getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidSubjectsTarget, sourceGraphRef));
getDlcGraph().add(new TripleImpl(interlinkGraphRef,Ontology.voidLinkPredicate, OWL.sameAs));
getDlcGraph().add(new TripleImpl(interlinkGraphRef, RDFS.label, new PlainLiteralImpl("Contains equivalence links")));
// create the graph to store the result of the smushing task
String smushGraphName = GRAPH_URN_PREFIX + timeStamp + SMUSH_GRAPH_URN_SUFFIX;
UriRef smushGraphRef = new UriRef(smushGraphName);
tcManager.createMGraph(smushGraphRef);
getDlcGraph().add(new TripleImpl(smushTaskRef, Ontology.deliverable, smushGraphRef));
// create the graph to store the result of the publishing task
String publishGraphName = GRAPH_URN_PREFIX + timeStamp + PUBLISH_GRAPH_URN_SUFFIX;
UriRef publishGraphRef = new UriRef(publishGraphName);
tcManager.createMGraph(publishGraphRef);
getDlcGraph().add(new TripleImpl(publishTaskRef, Ontology.deliverable, publishGraphRef));
setPipeRef(pipeRef);
}
catch (UnsupportedOperationException uoe) {
log.error("Error while creating a graph");
}
return graphExists;
}
/**
* Applies one of the following operations to a pipe's graphs: - rdfize
* (operation code: 1) - add triples (operation code: 2) - extract text
* (operation code: 3) - reconcile (operation code: 4) - smush
* (operation code: 5) - publish (operation code: 6)
*/
@POST
@Path("operate")
@Produces("text/plain")
public String operateOnGraphCommand(@Context final UriInfo uriInfo,
@FormParam("pipe") final UriRef pipeRef,
@FormParam("operation_code") final int operationCode,
@FormParam("data_url") final URL dataUrl,
@FormParam("rdfizer") final String rdfizer,
@FormParam("rdfdigester") final String rdfdigester,
@HeaderParam("Content-Type") String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
// validate arguments and handle all the connection exceptions
return operateOnPipe(pipeRef, operationCode, dataUrl, rdfizer, rdfdigester, mediaType);
}
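// Editor's note: illustrative invocation of the "operate" endpoint (host, port and pipe URI are
// hypothetical). operation_code 2 adds triples from data_url to the pipe's rdf.graph,
// 4 reconciles, 5 smushes, 6 publishes (see the operation-code constants above):
//   curl -X POST \
//        -d "pipe=urn:x-localinstance:/dlc/1385040000000" \
//        -d "operation_code=2" \
//        -d "data_url=http://example.org/dataset.ttl" \
//        http://localhost:8080/sourcing/operate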
private String operateOnPipe(UriRef pipeRef,
int operationCode,
URL dataUrl,
String rdfizer,
String rdfdigester,
String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
String message = "";
if (pipeExists(pipeRef)) {
setPipeRef(pipeRef);
switch (operationCode) {
case ADD_TRIPLES_OPERATION:
message = addTriples(pipeRef, dataUrl, mediaType);
break;
case RECONCILE_GRAPH_OPERATION:
message = reconcile(pipeRef, null);
break;
case SMUSH_GRAPH_OPERATION:
message = smush(pipeRef);
break;
case TEXT_EXTRACTION:
message = extractText(pipeRef, rdfdigester);
break;
case RDFIZE:
message = transformXml(dataUrl, rdfizer);
break;
case PUBLISH_DATA:
message = publishData(pipeRef);
break;
}
} else {
message = "The pipe does not exist.";
}
return message;
}
private String transformXml(URL dataUrl, String rdfizer) {
String message = "";
if(PUBMED_RDFIZER.equals(rdfizer)){
message = transformPubMedXml(dataUrl);
}
else if (PATENT_RDFIZER.equals(rdfizer)) {
message = transformPatentXml(dataUrl);
}
return message;
}
private String transformPubMedXml(URL dataUrl) {
String message = "PubMed XML->RDF transformation to be implemented.";
return message;
}
private String transformPatentXml(URL dataUrl) {
String message = "Marec Patent XML->RDF transformation to be implemented";
return message;
}
/**
* Load RDF data into an existing graph from a URL (schemes: "file://" or "http://").
* The arguments to be passed are:
* 1) graph in which the RDF data must be stored
* 2) url of the dataset
* After the upload the input graph is sent to a digester to extract text for indexing and
* adding entities found by NLP components (in the default chain) as subject
*/
private String addTriples(UriRef pipeRef, URL dataUrl, String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
String message = "";
// look up the pipe's rdf graph to which add the data
UriRef graphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
// add the triples of the temporary graph into the graph selected by the user
if (isValidUrl(dataUrl)) {
MGraph updatedGraph = addTriplesCommand(graphRef, dataUrl, mediaType);
message = "Added " + updatedGraph.size() + " triples to " + graphRef.getUnicodeString() + "\n";
} else {
message = "The URL of the data is not a valid one.\n";
}
log.info(message);
return message;
}
private MGraph addTriplesCommand(UriRef graphRef, URL dataUrl, String mediaType) throws Exception {
AccessController.checkPermission(new AllPermission());
MGraph graph = null;
URLConnection connection = dataUrl.openConnection();
connection.addRequestProperty("Accept", "application/rdf+xml; q=.9, text/turte;q=1");
// create a temporary graph to store the data
SimpleMGraph tempGraph = new SimpleMGraph();
InputStream data = connection.getInputStream();
if (data != null) {
if (mediaType.equals("application/x-www-form-urlencoded")) {
mediaType = getContentTypeFromUrl(dataUrl);
}
parser.parse(tempGraph, data, mediaType);
// add the triples of the temporary graph into the graph selected by the user
if (graphExists(graphRef)) {
graph = tcManager.getMGraph(graphRef);
graph.addAll(tempGraph);
}
}
return tempGraph;
}
/**
* Removes all the triples from the graph
*
*/
private String emptyGraph(UriRef graphRef) {
// removes all the triples from the graph
MGraph graph = tcManager.getMGraph(graphRef);
graph.clear();
return "Graph " + graphRef.getUnicodeString() + " is now empty.";
}
/**
* Deletes a graph, the reference to it in the DLC graph and deletes all the
* derived graphs linked to it by the dcterms:source property.
*
* @param graphRef
* @return
*/
private String deleteGraph(UriRef graphRef) {
tcManager.deleteTripleCollection(graphRef);
GraphNode dlcGraphNode = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
//remove the relation with the data lifecycle graph and all the information (triples) about the deleted graph (label).
dlcGraphNode.deleteProperty(DCTERMS.hasPart, graphRef);
return "Graph " + graphRef.getUnicodeString() + " has been deleted.";
}
/**
* Reconciles a graph with a target graph. The result of the reconciliation is an equivalence set
* stored in the interlink graph of the pipe. The graph used as source is the source rdf graph
* @param sourceGraphRef the URI of the referenced graph, i.e. the graph for which the reconciliation should be performed.
* @param targetGraphRef the URI of the target graph. If null the target graph is the same as the source graph.
* @return
* @throws Exception
*/
private String reconcile(UriRef pipeRef, UriRef targetGraphRef) throws Exception {
String message = "";
// Identifier of the link rules within the Silk config file
String linkSpecId = "agents";
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
if (graphExists(sourceGraphRef)) {
//if target graph is not provided the reconciliation will be done against the source graph itself
if(targetGraphRef == null){
targetGraphRef = sourceGraphRef;
}
// reconcile the source graph with the target graph
UriRef interlinkGraphRef = reconcileCommand(pipeRef, sourceGraphRef, targetGraphRef, linkSpecId);
TripleCollection interlinkGraph = tcManager.getMGraph(interlinkGraphRef);
if (interlinkGraph.size() > 0) {
message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n"
+ interlinkGraph.size() + " owl:sameAs statements have been created and stored in " + interlinkGraphRef.getUnicodeString();
}
else {
message = "A reconciliation task has been done between " + sourceGraphRef.getUnicodeString() + " and " + targetGraphRef.getUnicodeString() + ".\n"
+ "No equivalent entities have been found.";
}
}
else {
message = "The source graph does not exist.";
}
log.info(message);
return message;
}
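// Editor's note: illustrative only; the URIs are hypothetical. A successful reconciliation leaves
// owl:sameAs statements such as
//   <urn:x-temp:/dlc/1385/agent-1> owl:sameAs <urn:x-temp:/dlc/1385/agent-2> .
// in the pipe's interlink.graph; the "agents" link specification passed below is the identifier
// of the link rules in the Silk configuration used by the Interlinker service.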
private UriRef reconcileCommand(UriRef pipeRef, UriRef sourceGraphRef, UriRef targetGraphRef, String linkSpecId) throws Exception {
TripleCollection owlSameAs = null;
// get the pipe's interlink graph to store the result of the reconciliation task
UriRef interlinkGraphRef = new UriRef(pipeRef.getUnicodeString() + INTERLINK_GRAPH_URN_SUFFIX);
if (graphExists(sourceGraphRef)) {
// Get the source graph from the triple store
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
// Copy the graph
MGraph copySourceGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
copySourceGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
// reconcile the source graph with the target graph
owlSameAs = interlinker.interlink(copySourceGraph, targetGraphRef, linkSpecId);
if (owlSameAs.size() > 0) {
LockableMGraph sameAsGraph = tcManager.getMGraph(interlinkGraphRef);
sameAsGraph.addAll(owlSameAs);
// log the result (the equivalence set should be serialized and stored)
Lock l = sameAsGraph.getLock().readLock();
l.lock();
try {
Iterator<Triple> isameas = owlSameAs.iterator();
while (isameas.hasNext()) {
Triple t = isameas.next();
NonLiteral s = t.getSubject();
UriRef p = t.getPredicate();
Resource o = t.getObject();
log.info(s.toString() + p.getUnicodeString() + o.toString() + " .\n");
}
}
finally {
l.unlock();
}
// add a reference of the equivalence set to the source graph
getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidSubjectsTarget, sourceGraphRef));
// add a reference of the equivalence set to the target graph
getDlcGraph().add(new TripleImpl(interlinkGraphRef, Ontology.voidObjectsTarget, targetGraphRef));
}
}
return interlinkGraphRef;
}
/**
* Smushes the enhanced graph using the interlinking graph. More precisely, it collates URIs coming
* from different equivalent resources into a single one chosen among them. The triples in the
* source graph are copied in the smush graph that is then smushed using the interlinking
* graph.
* @param graphToSmushRef
* @return
*/
private String smush(UriRef pipeRef) {
String message = "Smushing task.\n";
// As the smush.graph must be published it has to contain the sioc:content property and all the subjects
// extracted during the extraction phase that are stored in the enhance.graph, together with all the triples from
// the rdf source graph
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
if(getInterlinkGraph().size() > 0 & getEnhanceGraph().size() > 0) {
LockableMGraph smushedGraph = smushCommand(enhanceGraphRef, getInterlinkGraph());
message = "Smushing of " + enhanceGraphRef.getUnicodeString()
+ " with equivalence set completed. "
+ "Smushed graph size = " + smushedGraph.size() + "\n";
}
else {
message = "No equivalence links available for " + enhanceGraphRef.getUnicodeString() + "\n"
+ "or the enhancement graph is empty.\n"
+ "The smushing task is applied to the enhancement graph using the equivalence set in the interlinking graph.";
}
return message;
}
private LockableMGraph smushCommand(UriRef enhanceGraphRef, LockableMGraph equivalenceSet) {
if(getSmushGraph().size() > 0) {
getSmushGraph().clear();
}
Lock rl = getEnhanceGraph().getLock().readLock();
rl.lock();
try {
// add triples from enhance graph to smush graph
getSmushGraph().addAll(getEnhanceGraph());
log.info("Copied " + getEnhanceGraph().size() + " triples from the enhancement graph into the smush graph.");
SimpleMGraph tempEquivalenceSet = new SimpleMGraph();
tempEquivalenceSet.addAll(equivalenceSet);
// smush and canonicalize uris
IriSmusher smusher = new CanonicalizingSameAsSmusher();
log.info("Smush task started.");
smusher.smush(getSmushGraph(), tempEquivalenceSet, true);
log.info("Smush task completed.");
}
finally {
rl.unlock();
}
//serializer.serialize(System.out, getSmushGraph(), SupportedFormat.RDF_XML);
return getSmushGraph();
}
private String extractText(UriRef pipeRef, String rdfdigester) {
String message = "";
if(PATENT_RDFDIGESTER.equals(rdfdigester)){
message = extractTextFromPatent(pipeRef);
}
else if (PUBMED_RDFDIGESTER.equals(rdfdigester)) {
message = extractTextFromPubMed(pipeRef);
}
return message;
}
/**
* Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
* property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
* will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime
* the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
* and a rdfs:label if available, are added to the patent resource using dcterms:subject property.
* @param pipeRef
* @return
*/
private String extractTextFromPatent(UriRef pipeRef){
String message = "Extracts text from patents and adds a sioc:content property.\n";
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
SimpleMGraph tempGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
tempGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
enhanceGraph.addAll(tempGraph);
patentDigester.extractText(enhanceGraph);
message += "Extracted text from " + enhanceGraphRef.getUnicodeString();
return message;
}
/**
* Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
* property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
* will be related to a PubMed article (resource of type bibo:Document) so that the article will be retrieved any time
* the keywords are searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
* and a rdfs:label if available, are added to the article resource using dcterms:subject property.
* @param pipeRef
* @return
*/
private String extractTextFromPubMed(UriRef pipeRef){
String message = "Extract text from PubMed articles and adding a sioc:content property.\n";
UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
SimpleMGraph tempGraph = new SimpleMGraph();
Lock rl = sourceGraph.getLock().readLock();
rl.lock();
try {
tempGraph.addAll(sourceGraph);
}
finally {
rl.unlock();
}
enhanceGraph.addAll(tempGraph);
pubmedDigester.extractText(enhanceGraph);
message += "Extracted text from " + enhanceGraphRef.getUnicodeString();
return message;
}
/**
* Moves data from smush.graph to content.graph. The triples (facts) in the two graphs must be coherent, i.e. the same.
* Before publishing the current smushed data must be compared with the last published data. New triples
* in the smushed graph not in the published graph must be added while triples in the published graph absent
* in the smushed graph must be removed. The algorithm is as follows
* 1) make all URIs in smush.graph http dereferencable (uri canonicalization)
* 2) find triples in smush.graph not in publish.graph (new triples)
* 3) find triples in publish.graph not in smush.graph (old triples)
* 4) add new triples to content.graph
* 5) remove old triples from content.graph
* 6) delete all triples in publish.graph
* 7) copy triples from smush.graph to publish.graph
*/
private String publishData(UriRef pipeRef) {
String message = "";
// add these triples to the content.graph
MGraph triplesToAdd = new SimpleMGraph();
// remove these triples from the content.graph
MGraph triplesToRemove = new SimpleMGraph();
// make all URIs in smush graph dereferencable
canonicalizeResources(getSmushGraph());
// triples to add to the content.graph
Lock ls = getSmushGraph().getLock().readLock();
ls.lock();
try {
Iterator<Triple> ismush = getSmushGraph().iterator();
while (ismush.hasNext()) {
Triple smushTriple = ismush.next();
if( ! getPublishGraph().contains(smushTriple) ) {
triplesToAdd.add(smushTriple);
}
}
}
finally {
ls.unlock();
}
// triples to remove from the content.graph
Lock lp = getPublishGraph().getLock().readLock();
lp.lock();
try {
Iterator<Triple> ipublish = getPublishGraph().iterator();
while (ipublish.hasNext()) {
Triple publishTriple = ipublish.next();
if( ! getSmushGraph().contains(publishTriple) ) {
triplesToRemove.add(publishTriple);
}
}
}
finally {
lp.unlock();
}
if(triplesToRemove.size() > 0) {
getContentGraph().removeAll(triplesToRemove);
log.info("Removed " + triplesToRemove.size() + " triples from " + CONTENT_GRAPH_REF.getUnicodeString());
}
else {
log.info("No triples to remove from " + CONTENT_GRAPH_REF.getUnicodeString());
}
if(triplesToAdd.size() > 0) {
getContentGraph().addAll(triplesToAdd);
log.info("Added " + triplesToAdd.size() + " triples to " + CONTENT_GRAPH_REF.getUnicodeString());
}
else {
log.info("No triples to add to " + CONTENT_GRAPH_REF.getUnicodeString());
}
getPublishGraph().clear();
Lock rl = getSmushGraph().getLock().readLock();
rl.lock();
try {
getPublishGraph().addAll( getSmushGraph() );
}
finally {
rl.unlock();
}
message = "Copied " + triplesToAdd.size() + " triples from " + pipeRef.getUnicodeString() + " to content-graph";
return message;
}
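// Editor's note: a small worked example of the delta computed above (illustrative only).
// If smush.graph holds the triples {A, B} after canonicalization and publish.graph still holds
// {B, C} from the previous run, then triplesToAdd = {A} and triplesToRemove = {C};
// content.graph gains A and loses C, and publish.graph is rebuilt as an exact copy of smush.graph.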
/**
* All the resources in the smush graph must be http dereferencable when published.
* All the triples in the smush graph are copied into a temporary graph. For each triple the subject and the object
* that have a non-http URI are changed into an http URI, and an equivalence link is added in the interlinking graph for each
* resource (subject and object) that has been changed.
*/
private void canonicalizeResources(LockableMGraph graph) {
MGraph graphCopy = new SimpleMGraph();
// graph containing the same triple with the http URI for each subject and object
MGraph canonicGraph = new SimpleMGraph();
Lock rl = graph.getLock().readLock();
rl.lock();
try {
graphCopy.addAll(graph);
}
finally {
rl.unlock();
}
Iterator<Triple> ismushTriples = graphCopy.iterator();
while (ismushTriples.hasNext()) {
Triple triple = ismushTriples.next();
UriRef subject = (UriRef) triple.getSubject();
Resource object = triple.getObject();
Set<UriRef> singletonSetSubject = new HashSet<UriRef>();
Set<UriRef> singletonSetObject = new HashSet<UriRef>();
// generate an http URI for both subject and object and add an equivalence link into the interlinking graph
if( subject.getUnicodeString().startsWith("urn:x-temp:") ) {
singletonSetSubject.add( (UriRef) triple.getSubject() );
subject = generateNewHttpUri(singletonSetSubject);
}
if( object.toString().startsWith("urn:x-temp:") ) {
singletonSetObject.add( (UriRef) triple.getObject() );
object = generateNewHttpUri(singletonSetObject);
}
// add the triple with the http uris to the canonic graph
canonicGraph.add(new TripleImpl(subject, triple.getPredicate(), object));
}
graph.clear();
graph.addAll(canonicGraph);
}
/**
* Validate URL
* A valid URL must start with file:/// or http://
*/
private boolean isValidUrl(URL url) {
boolean isValidUrl = false;
if(url != null) {
if( url.toString().startsWith("http://") || url.toString().startsWith("file:/")) {
isValidUrl = true;
}
}
return isValidUrl;
}
/**
* Extracts the content type from the file extension
*
* @param url
* @return
*/
private String getContentTypeFromUrl(URL url) {
String contentType = null;
if (url.getFile().endsWith("ttl")) {
contentType = "text/turtle";
} else if (url.getFile().endsWith("nt")) {
contentType = "text/turtle";
} else {
contentType = "application/rdf+xml";
}
return contentType;
}
/**
* Returns the data life cycle graph containing all the monitored graphs. It
* creates it if it doesn't exist yet.
*
* @return
*/
private LockableMGraph getDlcGraph() {
return tcManager.getMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE);
}
/**
* Checks if a graph exists and returns a boolean value.
*
* @param graph_ref
* @return
*/
private boolean graphExists(UriRef graph_ref) {
Set<UriRef> graphs = tcManager.listMGraphs();
Iterator<UriRef> igraphs = graphs.iterator();
while (igraphs.hasNext()) {
UriRef graphRef = igraphs.next();
if (graph_ref.toString().equals(graphRef.toString())) {
return true;
}
}
return false;
}
/**
* Checks whether a pipe exists
*/
private boolean pipeExists(UriRef pipeRef) {
boolean result = false;
if (pipeRef != null) {
GraphNode pipeNode = new GraphNode(pipeRef, getDlcGraph());
if(pipeNode != null) {
result = true;
}
}
return result;
}
/**
* Creates the data lifecycle graph. Must be called at the bundle
* activation if the graph doesn't exist yet.
*/
private MGraph createDlcGraph() {
MGraph dlcGraph = tcManager.createMGraph(DATA_LIFECYCLE_GRAPH_REFERENCE);
TcAccessController tca = new TcAccessController(tcManager);
tca.setRequiredReadPermissions(DATA_LIFECYCLE_GRAPH_REFERENCE,
Collections.singleton((Permission) new TcPermission(
"urn:x-localinstance:/content.graph", "read")));
return dlcGraph;
}
/**
* Generates a new http URI that will be used as the canonical one in place
* of a set of equivalent non-http URIs. An owl:sameAs statement is added to
* the interlinking graph stating that the canonical http URI is equivalent
* to one of the non-http URIs in the set of equivalent URIs.
* @param uriRefs
* @return
*/
private UriRef generateNewHttpUri(Set<UriRef> uriRefs) {
UriRef bestNonHttp = chooseBest(uriRefs);
String nonHttpString = bestNonHttp.getUnicodeString();
if (!nonHttpString.startsWith("urn:x-temp:")) {
throw new RuntimeException("Sorry we current assume all non-http "
+ "URIs to be canonicalized to be urn:x-temp");
}
String httpUriString = nonHttpString.replaceFirst("urn:x-temp:", baseUri);
UriRef httpUriRef = new UriRef(httpUriString);
// add an owl:sameAs statement in the interlinking graph
getInterlinkGraph().add(new TripleImpl(bestNonHttp, OWL.sameAs, httpUriRef));
return httpUriRef;
}
private UriRef chooseBest(Set<UriRef> httpUri) {
Iterator<UriRef> iter = httpUri.iterator();
UriRef best = iter.next();
while (iter.hasNext()) {
UriRef next = iter.next();
if (next.getUnicodeString().compareTo(best.getUnicodeString()) < 0) {
best = next;
}
}
return best;
}
/**
* An inner class to canonicalize URIs from the urn scheme to the http scheme. An http URI is chosen
* among the equivalent ones; if no http URI is available, a new one is created.
*/
private class CanonicalizingSameAsSmusher extends IriSmusher {
@Override
protected UriRef getPreferedIri(Set<UriRef> uriRefs) {
Set<UriRef> httpUri = new HashSet<UriRef>();
for (UriRef uriRef : uriRefs) {
if (uriRef.getUnicodeString().startsWith("http")) {
httpUri.add(uriRef);
}
}
if (httpUri.size() == 1) {
return httpUri.iterator().next();
}
// There is no http URI in the set of equivalent resources. The entity was unknown.
// A new representation of the entity with http URI will be created.
if (httpUri.size() == 0) {
return generateNewHttpUri(uriRefs);
}
if (httpUri.size() > 1) {
return chooseBest(httpUri);
}
throw new Error("Negative size set.");
}
}
}
| implemented acquisition of digesters services references at bundle activation
| src/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java | implemented acquisition of digesters services references at bundle activation | <ide><path>rc/main/java/eu/fusepool/datalifecycle/SourcingAdmin.java
<ide> import org.apache.felix.scr.annotations.Property;
<ide> import org.apache.felix.scr.annotations.Reference;
<ide> import org.apache.felix.scr.annotations.Service;
<add>import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
<ide> import org.apache.stanbol.commons.web.viewable.RdfViewable;
<add>import org.osgi.framework.BundleContext;
<ide> import org.osgi.framework.Constants;
<add>import org.osgi.framework.InvalidSyntaxException;
<add>import org.osgi.framework.ServiceReference;
<ide> import org.osgi.service.component.ComponentContext;
<ide> import org.slf4j.Logger;
<ide> import org.slf4j.LoggerFactory;
<ide> * Using slf4j for normal logging
<ide> */
<ide> private static final Logger log = LoggerFactory.getLogger(SourcingAdmin.class);
<add>
<add> BundleContext bundleCtx = null;
<ide>
<ide> @Reference
<ide> private Parser parser;
<ide> @Reference
<ide> private Interlinker interlinker;
<ide>
<del> @Reference(target="(extractorType=patent)")
<del> private RdfDigester patentDigester;
<del>
<del> @Reference(target="(extractorType=pubmed)")
<del> private RdfDigester pubmedDigester;
<add> // text etractors references (RdfDigester implementations)
<add> ServiceReference[] digestersRefs = null;
<add>
<add> //@Reference(target="(extractorType=patent)")
<add> //private RdfDigester patentDigester;
<add>
<add> //@Reference(target="(extractorType=pubmed)")
<add> //private RdfDigester pubmedDigester;
<add>
<ide> /**
<ide> * This is the name of the graph in which we "log" the requests
<ide> */
<ide> private final int PUBLISH_DATA = 6;
<ide>
<ide> // RDFdigester
<del> private final String PUBMED_RDFDIGESTER = "pubmed";
<del> private final String PATENT_RDFDIGESTER = "patent";
<add> //private final String PUBMED_RDFDIGESTER = "pubmed";
<add> //private final String PATENT_RDFDIGESTER = "patent";
<ide>
<ide> // RDFizer
<ide> private final String PUBMED_RDFIZER = "pubmed";
<ide>
<ide> private UriRef pipeRef = null;
<ide>
<del> @Activate
<add> @SuppressWarnings("unchecked")
<add> @Activate
<ide> protected void activate(ComponentContext context) {
<ide>
<ide> log.info("The Sourcing Admin Service is being activated");
<ide> Dictionary<String,Object> dict = context.getProperties() ;
<ide> Object baseUriObj = dict.get(BASE_URI_NAME) ;
<ide> baseUri = baseUriObj.toString();
<add>
<add> // Get RDFDigesters
<add> bundleCtx = context.getBundleContext();
<add>
<add> // Get RDFDigesters references
<add> try {
<add>
<add> digestersRefs = bundleCtx.getServiceReferences(RdfDigester.class.getName(),"(digesterImpl=*)");
<add> if (digestersRefs != null) {
<add> for (ServiceReference digesterRef : digestersRefs) {
<add> String digesterImpl = (String) digesterRef.getProperty("digesterImpl");
<add> log.info("SourcingAdmin RDFDigester services available: " + digesterImpl);
<add>
<add> }
<add> }
<add> }
<add> catch (InvalidSyntaxException e) {
<add>
<add> e.printStackTrace();
<add> }
<add>
<add>
<ide>
<ide> try {
<ide> createDlcGraph();
<ide> public RdfViewable serviceEntry(@Context final UriInfo uriInfo,
<ide> @QueryParam("url") final UriRef url,
<ide> @HeaderParam("user-agent") String userAgent) throws Exception {
<del> //this maks sure we are nt invoked with a trailing slash which would affect
<add> //this makes sure we are nt invoked with a trailing slash which would affect
<ide> //relative resolution of links (e.g. css)
<ide> TrailingSlash.enforcePresent(uriInfo);
<ide>
<ide> //central serviceUri in the response
<ide> final UriRef serviceUri = new UriRef(resourcePath);
<ide> //the in memory graph to which the triples for the response are added
<del> //final MGraph responseGraph = new IndexedMGraph();
<add> final MGraph responseGraph = new IndexedMGraph();
<add> Lock rl = getDlcGraph().getLock().readLock();
<add> rl.lock();
<add> try {
<add> responseGraph.addAll(getDlcGraph());
<add> }
<add> finally {
<add> rl.unlock();
<add> }
<add>
<add> // Add information about the available digester services
<add> for (ServiceReference digesterRef : digestersRefs) {
<add> String digesterImpl = (String) digesterRef.getProperty("digesterImpl");
<add> responseGraph.add(new TripleImpl(DATA_LIFECYCLE_GRAPH_REFERENCE, Ontology.service, new UriRef("urn:x-temp:/" + digesterImpl)));
<add> responseGraph.add(new TripleImpl(new UriRef("urn:x-temp:/" + digesterImpl), RDFS.label, new PlainLiteralImpl(digesterImpl)));
<add>
<add> }
<add>
<ide> //This GraphNode represents the service within our result graph
<del> //final GraphNode node = new GraphNode(serviceUri, responseGraph);
<del> //node.addProperty(Ontology.graph, new UriRef("http://fusepool.com/graphs/patentdata"));
<del> //node.addPropertyValue(RDFS.label, "A graph of patent data");
<add> final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, responseGraph);
<add>
<ide> //What we return is the GraphNode we created with a template path
<del> final GraphNode node = new GraphNode(DATA_LIFECYCLE_GRAPH_REFERENCE, getDlcGraph());
<del>
<ide> return new RdfViewable("SourcingAdmin", node, SourcingAdmin.class);
<ide> }
<ide>
<ide> message = smush(pipeRef);
<ide> break;
<ide> case TEXT_EXTRACTION:
<del> message = extractText(pipeRef, rdfdigester);
<add> message = extractTextFromRdf(pipeRef, rdfdigester);
<ide> break;
<ide> case RDFIZE:
<ide> message = transformXml(dataUrl, rdfizer);
<ide>
<ide> }
<ide>
<del> private String extractText(UriRef pipeRef, String rdfdigester) {
<del> String message = "";
<del>
<del> if(PATENT_RDFDIGESTER.equals(rdfdigester)){
<del> message = extractTextFromPatent(pipeRef);
<del> }
<del> else if (PUBMED_RDFDIGESTER.equals(rdfdigester)) {
<del> message = extractTextFromPubMed(pipeRef);
<del> }
<del>
<del> return message;
<del> }
<ide>
<ide> /**
<ide> * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
<ide> * @param pipeRef
<ide> * @return
<ide> */
<add> private String extractTextFromRdf(UriRef pipeRef, String selectedDigester){
<add>
<add> String message = "";
<add> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
<add> MGraph enhanceGraph = tcManager.getMGraph(enhanceGraphRef);
<add> UriRef sourceGraphRef = new UriRef(pipeRef.getUnicodeString() + SOURCE_GRAPH_URN_SUFFIX);
<add> LockableMGraph sourceGraph = tcManager.getMGraph(sourceGraphRef);
<add>
<add> SimpleMGraph tempGraph = new SimpleMGraph();
<add> Lock rl = sourceGraph.getLock().readLock();
<add> rl.lock();
<add> try {
<add> tempGraph.addAll(sourceGraph);
<add> }
<add> finally {
<add> rl.unlock();
<add> }
<add>
<add> enhanceGraph.addAll(tempGraph);
<add>
<add> for (ServiceReference digesterRef : digestersRefs) {
<add> String digesterType = (String) digesterRef.getProperty("digesterImpl");
<add> if(selectedDigester.equals(digesterType)) {
<add> RdfDigester digester = (RdfDigester) bundleCtx.getService(digesterRef);
<add> digester.extractText(enhanceGraph);
<add> message += "Extracted text from " + enhanceGraphRef.getUnicodeString() + " by " + digesterType + " digester";
<add>
<add> }
<add>
<add> }
<add>
<add>
<add> return message;
<add> }
<add>
<add>
<add> /**
<add> * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
<add> * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
<add> * will be related to a patent (resource of type pmo:PatentPublication) so that the patent will be retrieved anytime
<add> * the keyword is searched. The extractor also takes all the entities extracted by NLP enhancement engines. These entities
<add> * and a rdfs:label if available, are added to the patent resource using dcterms:subject property.
<add> * @param pipeRef
<add> * @return
<add> */
<add> /*
<ide> private String extractTextFromPatent(UriRef pipeRef){
<ide> String message = "Extracts text from patents and adds a sioc:content property.\n";
<ide> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
<ide>
<ide> return message;
<ide> }
<del>
<add> */
<ide> /**
<ide> * Extract text from dcterms:title and dcterms:abstract fields in the source graph and adds a sioc:content
<ide> * property with that text in the enhance graph. The text is used by the ECS for indexing. The keywords
<ide> * @param pipeRef
<ide> * @return
<ide> */
<add> /*
<ide> private String extractTextFromPubMed(UriRef pipeRef){
<ide> String message = "Extract text from PubMed articles and adding a sioc:content property.\n";
<ide> UriRef enhanceGraphRef = new UriRef(pipeRef.getUnicodeString() + ENHANCE_GRAPH_URN_SUFFIX);
<ide>
<ide> return message;
<ide> }
<add>
<add> */
<ide>
<ide> /**
<ide> * Moves data from smush.grah to content.graph. The triples (facts) in the two graphs must be coherent, i.e. the same. |
|
Java | apache-2.0 | 04abb5fba497bc64c15c2825b554db4860894199 | 0 | SirWellington/alchemy-test,SirWellington/alchemy-test | /*
* Copyright © 2018. Sir Wellington.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tech.sirwellington.alchemy.test.junit.runners;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import tech.sirwellington.alchemy.annotations.access.Internal;
import tech.sirwellington.alchemy.annotations.access.NonInstantiable;
import tech.sirwellington.alchemy.generator.AlchemyGenerator;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static tech.sirwellington.alchemy.generator.NumberGenerators.*;
import static tech.sirwellington.alchemy.test.Checks.Internal.checkNotNull;
import static tech.sirwellington.alchemy.test.Checks.Internal.checkThat;
import static tech.sirwellington.alchemy.test.junit.runners.GenerateDouble.Type.POSITIVE;
import static tech.sirwellington.alchemy.test.junit.runners.GenerateDouble.Type.RANGE;
/**
* Used with the {@link AlchemyTestRunner}, this annotation allows the
* Runtime Injection of Generated Doubles from the {@link AlchemyGenerator} library.
* <p>
* Example:
* <pre>
* {@code
* `@RunWith(AlchemyTestRunner.class)
* public class ExampleTest
* {
* `@GenerateDouble(POSITIVE)
* private double percentage;
*
* ...
* }
* }
* </pre>
* Note, ticks (`) used to escape Javadocs.
*
* @author SirWellington
* @see GenerateInteger
* @see GenerateLong
* @see GenerateString
*/
@Target(FIELD)
@Retention(RUNTIME)
public @interface GenerateDouble
{
Type value() default POSITIVE;
double min() default 0.0;
double max() default 1.0;
public enum Type
{
POSITIVE,
NEGATIVE,
ANY,
RANGE;
}
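// Editor's note (illustrative usage, not part of the original source): a RANGE generator is
// bounded by min/max, e.g.
//   @GenerateDouble(value = Type.RANGE, min = 0.0, max = 100.0)
//   private double percentage;
// min must be strictly less than max, otherwise Values.createGeneratorFor() rejects the annotation.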
@Internal
@NonInstantiable
class Values
{
private Values() throws IllegalAccessException
{
throw new IllegalAccessException("cannot instantiate");
}
static AlchemyGenerator<Double> createGeneratorFor(GenerateDouble annotation)
{
checkNotNull(annotation, "missing annotation");
Type type = annotation.value();
checkNotNull(type, "@GenerateDouble missing value");
if (type == RANGE)
{
double min = annotation.min();
double max = annotation.max();
checkThat(min < max, "@GenerateDouble: min must be less than max");
return doubles(min, max);
}
//Cover remaining cases
switch (type)
{
case POSITIVE:
return positiveDoubles();
case NEGATIVE:
return negativeDoubles();
default:
return anyDoubles();
}
}
}
}
| src/main/java/tech/sirwellington/alchemy/test/junit/runners/GenerateDouble.java | /*
* Copyright © 2018. Sir Wellington.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tech.sirwellington.alchemy.test.junit.runners;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import tech.sirwellington.alchemy.annotations.access.Internal;
import tech.sirwellington.alchemy.annotations.access.NonInstantiable;
import tech.sirwellington.alchemy.generator.AlchemyGenerator;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static tech.sirwellington.alchemy.generator.NumberGenerators.doubles;
import static tech.sirwellington.alchemy.generator.NumberGenerators.positiveDoubles;
import static tech.sirwellington.alchemy.test.Checks.Internal.checkNotNull;
import static tech.sirwellington.alchemy.test.Checks.Internal.checkThat;
import static tech.sirwellington.alchemy.test.junit.runners.GenerateDouble.Type.POSITIVE;
import static tech.sirwellington.alchemy.test.junit.runners.GenerateDouble.Type.RANGE;
/**
* Used with the {@link AlchemyTestRunner}, this annotation allows the
* Runtime Injection of Generated Doubles from the {@link AlchemyGenerator} library.
* <p>
* Example:
* <pre>
* {@code
* `@RunWith(AlchemyTestRunner.class)
* public class ExampleTest
* {
* `@GenerateDouble(POSITIVE)
* private double percentage;
*
* ...
* }
* }
* </pre>
* Note, ticks (`) used to escape Javadocs.
*
* @author SirWellington
* @see GenerateInteger
* @see GenerateLong
* @see GenerateString
*/
@Target(FIELD)
@Retention(RUNTIME)
public @interface GenerateDouble
{
Type value() default POSITIVE;
double min() default 0.0;
double max() default 1.0;
public enum Type
{
POSITIVE,
NEGATIVE,
ANY,
RANGE;
}
@Internal
@NonInstantiable
class Values
{
private Values() throws IllegalAccessException
{
throw new IllegalAccessException("cannot instantiate");
}
static AlchemyGenerator<Double> createGeneratorFor(GenerateDouble annotation)
{
checkNotNull(annotation, "missing annotation");
Type type = annotation.value();
checkNotNull(type, "@GenerateDouble missing value");
if (type == RANGE)
{
double min = annotation.min();
double max = annotation.max();
checkThat(min < max, "@GenerateDouble: min must be less than max");
return doubles(min, max);
}
//Cover remaining cases
switch (type)
{
case POSITIVE:
return positiveDoubles();
case NEGATIVE:
return doubles(-Double.MAX_VALUE, -Double.MAX_VALUE);
default:
return doubles(-Double.MAX_VALUE, Double.MAX_VALUE);
}
}
}
}
| Refines @GenerateDouble
| src/main/java/tech/sirwellington/alchemy/test/junit/runners/GenerateDouble.java | Refines @GenerateDouble | <ide><path>rc/main/java/tech/sirwellington/alchemy/test/junit/runners/GenerateDouble.java
<ide>
<ide> import static java.lang.annotation.ElementType.FIELD;
<ide> import static java.lang.annotation.RetentionPolicy.RUNTIME;
<del>import static tech.sirwellington.alchemy.generator.NumberGenerators.doubles;
<del>import static tech.sirwellington.alchemy.generator.NumberGenerators.positiveDoubles;
<add>import static tech.sirwellington.alchemy.generator.NumberGenerators.*;
<ide> import static tech.sirwellington.alchemy.test.Checks.Internal.checkNotNull;
<ide> import static tech.sirwellington.alchemy.test.Checks.Internal.checkThat;
<ide> import static tech.sirwellington.alchemy.test.junit.runners.GenerateDouble.Type.POSITIVE;
<ide> case POSITIVE:
<ide> return positiveDoubles();
<ide> case NEGATIVE:
<del> return doubles(-Double.MAX_VALUE, -Double.MAX_VALUE);
<add> return negativeDoubles();
<ide> default:
<del> return doubles(-Double.MAX_VALUE, Double.MAX_VALUE);
<add> return anyDoubles();
<ide> }
<ide> }
<ide> |
|
JavaScript | apache-2.0 | fatal: invalid reference: FETCH_HEAD^
| c976b0d9570293524b30471f4801142fc4415809 | 128 | SAPDshell/dshell | var sys = require('sys'),
url = require('url'),
exec = require('child_process').exec,
http = require('http'),
connect = require('connect'),
rimraf = require('rimraf')
var crypto = require('crypto');
var fs = require('fs');
repMiddleware = function worseThanUselessMiddleware(req, res, next) {
try {
var rep = url.parse(req.url, true).query.repo;
if(rep) {
req.rep = rep;
next();
}
else {
res.end("No repo");
}
} catch (e) {
res.end(e.toString());
}
}
dirMiddleware = function worseThanUselessMiddleware(req, res, next) {
try {
var dir = 'z' + crypto.randomBytes(4).readUInt32LE(0),
end = res.end;
// res.end = function(chunk, encoding) {
// res.end = end;
// rimraf(req.dir, function() {
// res.end(chunk, encoding);
// })
// }
fs.mkdir(dir, function(e) {
req.dir = dir;
next();
})
}
catch (e) {
res.end(e.toString());
}
}
var app = connect()
.use(repMiddleware)
.use(dirMiddleware)
.use(function(req, res){
exec("git clone " + req.rep + " repo", {cwd: req.dir}, function(err, stdout, stderr) {
console.log(err, stdout, stderr)
var deploy = JSON.parse(fs.readFileSync(req.dir + "/repo/deployment.json"));
console.log(deploy)
install(deploy.strategy, req.dir, function(err, strategy) {
if (err) {
res.writeHead(400);
res.end(err.toString());
return;
}
console.log("call strategy", strategy);
strategy(req.dir + "/repo", deploy, function(err, msg) {
res.writeHead(200, {
'Content-Type': 'text/plain'
});
res.end(msg);
});
});
})
}).listen(8080);
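// Editor's note: illustrative only; the repository URL and strategy module name are hypothetical.
// The server is driven by a query parameter and a deployment descriptor in the cloned repository:
//   GET http://localhost:8080/?repo=https://github.com/example/app.git
// where the repository root contains a deployment.json such as
//   { "strategy": "example-deploy-strategy" }
// The named strategy module is installed via npm at request time and then invoked as
//   strategy(<clone dir>/repo, deploy, callback).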
var install = function(strategy, dir, cb) {
var npm = require("npm")
// https://npmjs.org/api/npm.html
npm.load({}, function(err, npm) {
// npm object loaded
npm.commands.install([strategy], function(err, modules) {
if(err) {
cb(err)
}
try {
console.log(modules[0][0].split("@")[0])
cb(null, require(modules[0][0].split("@")[0]));
} catch (ex) {
cb(ex);
}
})
})
} | dj.js | initial commit
| dj.js | initial commit | <ide><path>j.js
<add>var sys = require('sys'),
<add> url = require('url'),
<add> exec = require('child_process').exec,
<add> http = require('http'),
<add> connect = require('connect'),
<add> rimraf = require('rimraf')
<add>
<add>var crypto = require('crypto');
<add>var fs = require('fs');
<add>
<add>repMiddleware = function worseThanUselessMiddleware(req, res, next) {
<add> try {
<add> var rep = url.parse(req.url, true).query.repo;
<add> if(rep) {
<add> req.rep = rep;
<add> next();
<add> }
<add> else {
<add> res.end("No repo");
<add> }
<add> } catch (e) {
<add> res.end(e.toString());
<add> }
<add>}
<add>
<add>dirMiddleware = function worseThanUselessMiddleware(req, res, next) {
<add> try {
<add> var dir = 'z' + crypto.randomBytes(4).readUInt32LE(0),
<add> end = res.end;
<add> // res.end = function(chunk, encoding) {
<add> // res.end = end;
<add> // rimraf(req.dir, function() {
<add> // res.end(chunk, encoding);
<add> // })
<add> // }
<add> fs.mkdir(dir, function(e) {
<add> req.dir = dir;
<add> next();
<add> })
<add> }
<add> catch (e) {
<add> res.end(e.toString());
<add> }
<add>}
<add>
<add>var app = connect()
<add> .use(repMiddleware)
<add> .use(dirMiddleware)
<add> .use(function(req, res){
<add> exec("git clone " + req.rep + " repo", {cwd: req.dir}, function(err, stdout, stderr) {
<add>
<add> console.log(err, stdout, stderr)
<add>
<add> var deploy = JSON.parse(fs.readFileSync(req.dir + "/repo/deployment.json"));
<add>
<add> console.log(deploy)
<add>
<add> install(deploy.strategy, req.dir, function(err, strategy) {
<add> if (err) {
<add> res.writeHead(400);
<add> res.end(err.toString());
<add> return;
<add> }
<add> console.log("call strategy", strategy);
<add> strategy(req.dir + "/repo", deploy, function(err, msg) {
<add> res.writeHead(200, {
<add> 'Content-Type': 'text/plain'
<add> });
<add> res.end(msg);
<add> });
<add> });
<add> })
<add> }).listen(8080);
<add>
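<add>// install(strategy, dir, cb): npm-installs the package named by `strategy`, then requires it
<add>// and passes the loaded module to cb(err, module). Note: `dir` is currently unused.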
<add>var install = function(strategy, dir, cb) {
<add> var npm = require("npm")
<add> // https://npmjs.org/api/npm.html
<add> npm.load({}, function(err, npm) {
<add> // npm object loaded
<add> npm.commands.install([strategy], function(err, modules) {
<add> if(err) {
<add>        return cb(err);
<add> }
<add> try {
<add> console.log(modules[0][0].split("@")[0])
<add> cb(null, require(modules[0][0].split("@")[0]));
<add> } catch (ex) {
<add> cb(ex);
<add> }
<add> })
<add> })
<add>} |
|
Java | apache-2.0 | b9fde2362894efb565d1f2c9b0443ddb608a0d8c | 0 | chrisrhoden/PlayerHater,PRX/PlayerHater | package org.prx.android.playerhater;
import android.media.AudioManager;
public class OnAudioFocusChangeListener implements
AudioManager.OnAudioFocusChangeListener {
// 5 seconds
public static final int REWIND_ON_RESUME_DURATION = 5000;
// 5 minutes
public static final int SKIP_RESUME_AFTER_DURATION = 300000;
private PlayerHaterService mService;
private long pausedAt;
private boolean isBeingDucked;
public OnAudioFocusChangeListener(PlayerHaterService service) {
mService = service;
isBeingDucked = false;
}
@Override
public void onAudioFocusChange(int focusChange) {
switch (focusChange) {
case AudioManager.AUDIOFOCUS_GAIN:
// Good, glad to hear it.
if (isBeingDucked && !mService.isPlaying()) {
isBeingDucked = false;
if (pausedAt + (SKIP_RESUME_AFTER_DURATION) > System
.currentTimeMillis()) {
try {
mService.play();
} catch (Exception e) {
// Probably illegal state, don't care.
}
}
}
break;
case AudioManager.AUDIOFOCUS_LOSS:
// Oh, no! Ok, let's handle that.
if (mService.isPlaying()) {
mService.stop();
}
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
// Let's pause, expecting it to come back.
if (mService.isPlaying()) {
pausedAt = System.currentTimeMillis();
isBeingDucked = true;
mService.pause();
mService.seekTo(Math.max(0, mService.getCurrentPosition()
- REWIND_ON_RESUME_DURATION));
}
break;
default:
// Dunno.
}
}
} | src/org/prx/android/playerhater/OnAudioFocusChangeListener.java | package org.prx.android.playerhater;
import android.media.AudioManager;
public class OnAudioFocusChangeListener implements
android.media.AudioManager.OnAudioFocusChangeListener {
private PlayerHaterService mService;
private boolean isBeingDucked;
public OnAudioFocusChangeListener(PlayerHaterService service) {
mService = service;
isBeingDucked = false;
}
@Override
public void onAudioFocusChange(int focusChange) {
switch (focusChange) {
case AudioManager.AUDIOFOCUS_GAIN:
// Good, glad to hear it.
if (isBeingDucked && !mService.isPlaying()) {
isBeingDucked = false;
try {
mService.play();
} catch (Exception e) {
// Probably illegal state, don't care.
}
}
break;
case AudioManager.AUDIOFOCUS_LOSS:
// Oh, no! Ok, let's handle that.
if (mService.isPlaying()) {
mService.stop();
}
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
// Let's pause, expecting it to come back.
if (mService.isPlaying()) {
isBeingDucked = true;
mService.pause();
mService.seekTo(Math.max(0, mService.getCurrentPosition() - 3000));
}
break;
default:
// Dunno.
}
}
}
| playback does not resume after some period of time being interrupted
| src/org/prx/android/playerhater/OnAudioFocusChangeListener.java | playback does not resume after some period of time being interrupted | <ide><path>rc/org/prx/android/playerhater/OnAudioFocusChangeListener.java
<ide> import android.media.AudioManager;
<ide>
<ide> public class OnAudioFocusChangeListener implements
<del> android.media.AudioManager.OnAudioFocusChangeListener {
<add> AudioManager.OnAudioFocusChangeListener {
<add>
<add> // 5 seconds
<add> public static final int REWIND_ON_RESUME_DURATION = 5000;
<add>
<add> // 5 minutes
<add> public static final int SKIP_RESUME_AFTER_DURATION = 300000;
<ide>
<ide> private PlayerHaterService mService;
<add> private long pausedAt;
<ide> private boolean isBeingDucked;
<ide>
<ide> public OnAudioFocusChangeListener(PlayerHaterService service) {
<ide> // Good, glad to hear it.
<ide> if (isBeingDucked && !mService.isPlaying()) {
<ide> isBeingDucked = false;
<del> try {
<del> mService.play();
<del> } catch (Exception e) {
<del> // Probably illegal state, don't care.
<add> if (pausedAt + (SKIP_RESUME_AFTER_DURATION) > System
<add> .currentTimeMillis()) {
<add> try {
<add> mService.play();
<add> } catch (Exception e) {
<add> // Probably illegal state, don't care.
<add> }
<ide> }
<ide> }
<ide> break;
<ide> case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
<ide> // Let's pause, expecting it to come back.
<ide> if (mService.isPlaying()) {
<add> pausedAt = System.currentTimeMillis();
<ide> isBeingDucked = true;
<ide> mService.pause();
<del> mService.seekTo(Math.max(0, mService.getCurrentPosition() - 3000));
<add> mService.seekTo(Math.max(0, mService.getCurrentPosition()
<add> - REWIND_ON_RESUME_DURATION));
<ide> }
<ide> break;
<ide> default: |
|
Java | apache-2.0 | error: pathspec 'src/main/java/com/terradue/dsi/model/AccountOwner.java' did not match any file(s) known to git
| feab2955523b92ea5940ad14d5e93f0126d05b8c | 1 | Terradue/dsi-tools | package com.terradue.dsi.model;
/*
* Copyright 2012 Terradue srl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static javax.xml.bind.annotation.XmlAccessType.FIELD;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
/**
* @since 0.2
*/
@XmlAccessorType( FIELD )
public final class AccountOwner
{
@XmlElement
private String id;
@XmlElement
private String firstName;
@XmlElement
private String lastName;
@XmlElement
private String email;
@XmlElement
private String login;
@XmlElement
private boolean locked;
@XmlElement( name = "passwordSet" )
private boolean pwdSet;
/**
* @return the id
*/
public String getId()
{
return id;
}
/**
* @param id the id to set
*/
public void setId( String id )
{
this.id = id;
}
/**
* @return the firstName
*/
public String getFirstName()
{
return firstName;
}
/**
* @param firstName the firstName to set
*/
public void setFirstName( String firstName )
{
this.firstName = firstName;
}
/**
* @return the lastName
*/
public String getLastName()
{
return lastName;
}
/**
* @param lastName the lastName to set
*/
public void setLastName( String lastName )
{
this.lastName = lastName;
}
/**
* @return the email
*/
public String getEmail()
{
return email;
}
/**
* @param email the email to set
*/
public void setEmail( String email )
{
this.email = email;
}
/**
* @return the login
*/
public String getLogin()
{
return login;
}
/**
* @param login the login to set
*/
public void setLogin( String login )
{
this.login = login;
}
/**
* @return the locked
*/
public boolean isLocked()
{
return locked;
}
/**
* @param locked the locked to set
*/
public void setLocked( boolean locked )
{
this.locked = locked;
}
/**
* @return the pwdSet
*/
public boolean isPwdSet()
{
return pwdSet;
}
/**
* @param pwdSet the pwdSet to set
*/
public void setPwdSet( boolean pwdSet )
{
this.pwdSet = pwdSet;
}
@Override
public String toString()
{
return login;
}
}
| src/main/java/com/terradue/dsi/model/AccountOwner.java | initial checkin of AccountOwner entity | src/main/java/com/terradue/dsi/model/AccountOwner.java | initial checkin of AccountOwner entity | <ide><path>rc/main/java/com/terradue/dsi/model/AccountOwner.java
<add>package com.terradue.dsi.model;
<add>
<add>/*
<add> * Copyright 2012 Terradue srl
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>import static javax.xml.bind.annotation.XmlAccessType.FIELD;
<add>
<add>import javax.xml.bind.annotation.XmlAccessorType;
<add>import javax.xml.bind.annotation.XmlElement;
<add>
<add>/**
<add> * @since 0.2
<add> */
<add>@XmlAccessorType( FIELD )
<add>public final class AccountOwner
<add>{
<add>
<add> @XmlElement
<add> private String id;
<add>
<add> @XmlElement
<add> private String firstName;
<add>
<add> @XmlElement
<add> private String lastName;
<add>
<add> @XmlElement
<add> private String email;
<add>
<add> @XmlElement
<add> private String login;
<add>
<add> @XmlElement
<add> private boolean locked;
<add>
<add> @XmlElement( name = "passwordSet" )
<add> private boolean pwdSet;
<add>
<add> /**
<add> * @return the id
<add> */
<add> public String getId()
<add> {
<add> return id;
<add> }
<add>
<add> /**
<add> * @param id the id to set
<add> */
<add> public void setId( String id )
<add> {
<add> this.id = id;
<add> }
<add>
<add> /**
<add> * @return the firstName
<add> */
<add> public String getFirstName()
<add> {
<add> return firstName;
<add> }
<add>
<add> /**
<add> * @param firstName the firstName to set
<add> */
<add> public void setFirstName( String firstName )
<add> {
<add> this.firstName = firstName;
<add> }
<add>
<add> /**
<add> * @return the lastName
<add> */
<add> public String getLastName()
<add> {
<add> return lastName;
<add> }
<add>
<add> /**
<add> * @param lastName the lastName to set
<add> */
<add> public void setLastName( String lastName )
<add> {
<add> this.lastName = lastName;
<add> }
<add>
<add> /**
<add> * @return the email
<add> */
<add> public String getEmail()
<add> {
<add> return email;
<add> }
<add>
<add> /**
<add> * @param email the email to set
<add> */
<add> public void setEmail( String email )
<add> {
<add> this.email = email;
<add> }
<add>
<add> /**
<add> * @return the login
<add> */
<add> public String getLogin()
<add> {
<add> return login;
<add> }
<add>
<add> /**
<add> * @param login the login to set
<add> */
<add> public void setLogin( String login )
<add> {
<add> this.login = login;
<add> }
<add>
<add> /**
<add> * @return the locked
<add> */
<add> public boolean isLocked()
<add> {
<add> return locked;
<add> }
<add>
<add> /**
<add> * @param locked the locked to set
<add> */
<add> public void setLocked( boolean locked )
<add> {
<add> this.locked = locked;
<add> }
<add>
<add> /**
<add> * @return the pwdSet
<add> */
<add> public boolean isPwdSet()
<add> {
<add> return pwdSet;
<add> }
<add>
<add> /**
<add> * @param pwdSet the pwdSet to set
<add> */
<add> public void setPwdSet( boolean pwdSet )
<add> {
<add> this.pwdSet = pwdSet;
<add> }
<add>
<add> @Override
<add> public String toString()
<add> {
<add> return login;
<add> }
<add>
<add>} |
|
Java | mit | 04018ecf1651ace5f434eddae5a7fdf342902d2c | 0 | lkrnac/blog-2015-11-mock-spring-bean-v2 | package net.lkrnac.blog.testing.mockbeanv2.aoptesting;
import org.mockito.Mockito;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Repository;
import lombok.Getter;
import net.lkrnac.blog.testing.mockbeanv2.beans.AddressDao;
@Primary
@Repository
@Profile("AddressService-aop-mock-test")
public class AddressDaoMock extends AddressDao{
@Getter
private AddressDao mockDelegate = Mockito.mock(AddressDao.class);
public String readAddress(String userName) {
return mockDelegate.readAddress(userName);
}
}
| src/test/java/net/lkrnac/blog/testing/mockbeanv2/aoptesting/AddressDaoMock.java | package net.lkrnac.blog.testing.mockbeanv2.aoptesting;
import org.mockito.Mockito;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Repository;
import lombok.Getter;
import net.lkrnac.blog.testing.mockbeanv2.beans.AddressDao;
@Primary
@Repository
@Profile("AddressService-aop-mock-test")
public class AddressDaoMock extends AddressDao{
@Getter
private AddressDaoMock mockDelegate = Mockito.mock(AddressDaoMock.class);
public String readAddress(String userName) {
return mockDelegate.readAddress(userName);
}
}
| Change type of delegate
| src/test/java/net/lkrnac/blog/testing/mockbeanv2/aoptesting/AddressDaoMock.java | Change type of delegate | <ide><path>rc/test/java/net/lkrnac/blog/testing/mockbeanv2/aoptesting/AddressDaoMock.java
<ide> @Profile("AddressService-aop-mock-test")
<ide> public class AddressDaoMock extends AddressDao{
<ide> @Getter
<del> private AddressDaoMock mockDelegate = Mockito.mock(AddressDaoMock.class);
<add> private AddressDao mockDelegate = Mockito.mock(AddressDao.class);
<ide>
<ide> public String readAddress(String userName) {
<ide> return mockDelegate.readAddress(userName); |
|
Java | mit | 6e14b241448e444f80b6d493be50d6246680c64f | 0 | ngageoint/geopackage-java,ngageoint/geopackage-java | package mil.nga.geopackage.test.extension.properties;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import junit.framework.TestCase;
import mil.nga.geopackage.extension.Extensions;
import mil.nga.geopackage.extension.properties.PropertiesExtension;
import mil.nga.geopackage.extension.properties.PropertyNames;
import mil.nga.geopackage.test.CreateGeoPackageTestCase;
import org.junit.Test;
/**
* Properties Extension Tests
*
* @author osbornb
*/
public class PropertiesExtensionTest extends CreateGeoPackageTestCase {
/**
* Test properties extension
*/
@Test
public void testPropertiesExtension() {
PropertiesExtension extension = new PropertiesExtension(geoPackage);
TestCase.assertFalse(extension.has());
TestCase.assertFalse(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
Extensions extensions = extension.getOrCreate();
TestCase.assertNotNull(extensions);
TestCase.assertTrue(extension.has());
TestCase.assertTrue(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
TestCase.assertEquals(0, extension.numProperties());
TestCase.assertTrue(extension.getProperties().isEmpty());
TestCase.assertEquals(0, extension.numValues());
TestCase.assertTrue(extension.getValues(PropertyNames.TITLE).isEmpty());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TITLE));
TestCase.assertFalse(extension.hasValues(PropertyNames.TITLE));
TestCase.assertEquals(0, extension.numValues(PropertyNames.TITLE));
final String name = "My GeoPackage";
TestCase.assertTrue(extension.addValue(PropertyNames.TITLE, name));
TestCase.assertEquals(1, extension.numProperties());
TestCase.assertEquals(1, extension.getProperties().size());
TestCase.assertEquals(1, extension.numValues());
TestCase.assertEquals(1, extension.getValues(PropertyNames.TITLE)
.size());
TestCase.assertTrue(extension.hasSingleValue(PropertyNames.TITLE));
TestCase.assertTrue(extension.hasValues(PropertyNames.TITLE));
TestCase.assertEquals(1, extension.numValues(PropertyNames.TITLE));
TestCase.assertEquals(1, extension.numValues());
TestCase.assertEquals(name, extension.getValue(PropertyNames.TITLE));
TestCase.assertTrue(extension.hasValue(PropertyNames.TITLE, name));
final String tag = "TAG";
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 1));
TestCase.assertEquals(2, extension.numProperties());
TestCase.assertEquals(2, extension.getProperties().size());
TestCase.assertEquals(2, extension.numValues());
TestCase.assertEquals(1, extension.getValues(PropertyNames.TAG).size());
TestCase.assertTrue(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(1, extension.numValues(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + 1));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 2));
TestCase.assertEquals(2, extension.numProperties());
TestCase.assertEquals(2, extension.getProperties().size());
TestCase.assertEquals(3, extension.numValues());
TestCase.assertEquals(2, extension.getValues(PropertyNames.TAG).size());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(2, extension.numValues(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + 2));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 3));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 4));
TestCase.assertFalse(extension.addValue(PropertyNames.TAG, tag + 4));
Set<String> values = new HashSet<>(
extension.getValues(PropertyNames.TAG));
for (int i = 1; i <= 4; i++) {
TestCase.assertTrue(values.contains(tag + i));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + i));
}
TestCase.assertEquals(1,
extension.deleteValue(PropertyNames.TAG, tag + 3));
TestCase.assertEquals(3, extension.getValues(PropertyNames.TAG).size());
TestCase.assertEquals(3, extension.numValues(PropertyNames.TAG));
TestCase.assertFalse(extension.hasValue(PropertyNames.TAG, tag + 3));
TestCase.assertEquals(3, extension.deleteProperty(PropertyNames.TAG));
TestCase.assertEquals(1, extension.numProperties());
TestCase.assertEquals(1, extension.getProperties().size());
TestCase.assertEquals(1, extension.numValues());
TestCase.assertTrue(extension.getValues(PropertyNames.TAG).isEmpty());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertFalse(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(0, extension.numValues(PropertyNames.TAG));
extension.removeExtension();
TestCase.assertFalse(extension.has());
TestCase.assertFalse(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
}
/**
* Test property names
*/
@Test
public void testPropertyNames() {
PropertiesExtension extension = new PropertiesExtension(geoPackage);
extension.getOrCreate();
int count = 0;
count += testPropertyName(extension, PropertyNames.CONTRIBUTOR);
count += testPropertyName(extension, PropertyNames.COVERAGE);
count += testPropertyName(extension, PropertyNames.CREATED);
count += testPropertyName(extension, PropertyNames.CREATOR);
count += testPropertyName(extension, PropertyNames.DATE);
count += testPropertyName(extension, PropertyNames.DESCRIPTION);
count += testPropertyName(extension, PropertyNames.IDENTIFIER);
count += testPropertyName(extension, PropertyNames.LICENSE);
count += testPropertyName(extension, PropertyNames.MODIFIED);
count += testPropertyName(extension, PropertyNames.PUBLISHER);
count += testPropertyName(extension, PropertyNames.REFERENCES);
count += testPropertyName(extension, PropertyNames.RELATION);
count += testPropertyName(extension, PropertyNames.SOURCE);
count += testPropertyName(extension, PropertyNames.SPATIAL);
count += testPropertyName(extension, PropertyNames.SUBJECT);
count += testPropertyName(extension, PropertyNames.TAG);
count += testPropertyName(extension, PropertyNames.TEMPORAL);
count += testPropertyName(extension, PropertyNames.TITLE);
count += testPropertyName(extension, PropertyNames.TYPE);
count += testPropertyName(extension, PropertyNames.URI);
count += testPropertyName(extension, PropertyNames.VALID);
count += testPropertyName(extension, PropertyNames.VERSION);
TestCase.assertEquals(22, extension.numProperties());
TestCase.assertEquals(count, extension.numValues());
TestCase.assertEquals(count, extension.deleteAll());
TestCase.assertEquals(0, extension.numProperties());
TestCase.assertEquals(0, extension.numValues());
extension.removeExtension();
TestCase.assertFalse(extension.has());
}
private int testPropertyName(PropertiesExtension extension, String property) {
TestCase.assertFalse(extension.hasProperty(property));
int count = 1;
if (Math.random() < .5) {
count = 1 + (int) (10 * Math.random());
}
Set<String> values = new HashSet<>();
for (int i = 0; i < count; i++) {
String value = UUID.randomUUID().toString();
values.add(value);
extension.addValue(property, value);
}
TestCase.assertTrue(extension.hasProperty(property));
TestCase.assertEquals(count, extension.numValues(property));
TestCase.assertEquals(count == 1, extension.hasSingleValue(property));
TestCase.assertTrue(extension.hasValues(property));
List<String> propertyValues = extension.getValues(property);
TestCase.assertEquals(values.size(), propertyValues.size());
for (String value : propertyValues) {
TestCase.assertTrue(values.contains(value));
TestCase.assertTrue(extension.hasValue(property, value));
}
return count;
}
}
| src/test/java/mil/nga/geopackage/test/extension/properties/PropertiesExtensionTest.java | package mil.nga.geopackage.test.extension.properties;
import java.util.HashSet;
import java.util.Set;
import junit.framework.TestCase;
import mil.nga.geopackage.extension.Extensions;
import mil.nga.geopackage.extension.properties.PropertiesExtension;
import mil.nga.geopackage.extension.properties.PropertyNames;
import mil.nga.geopackage.test.CreateGeoPackageTestCase;
import org.junit.Test;
/**
* Properties Extension Tests
*
* @author osbornb
*/
public class PropertiesExtensionTest extends CreateGeoPackageTestCase {
/**
* Test properties extension
*/
@Test
public void testPropertiesExtension() {
PropertiesExtension extension = new PropertiesExtension(geoPackage);
TestCase.assertFalse(extension.has());
TestCase.assertFalse(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
Extensions extensions = extension.getOrCreate();
TestCase.assertNotNull(extensions);
TestCase.assertTrue(extension.has());
TestCase.assertTrue(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
TestCase.assertEquals(0, extension.numProperties());
TestCase.assertTrue(extension.getProperties().isEmpty());
TestCase.assertEquals(0, extension.numValues());
TestCase.assertTrue(extension.getValues(PropertyNames.TITLE).isEmpty());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TITLE));
TestCase.assertFalse(extension.hasValues(PropertyNames.TITLE));
TestCase.assertEquals(0, extension.numValues(PropertyNames.TITLE));
final String name = "My GeoPackage";
TestCase.assertTrue(extension.addValue(PropertyNames.TITLE, name));
TestCase.assertEquals(1, extension.numProperties());
TestCase.assertEquals(1, extension.getProperties().size());
TestCase.assertEquals(1, extension.numValues());
TestCase.assertEquals(1, extension.getValues(PropertyNames.TITLE)
.size());
TestCase.assertTrue(extension.hasSingleValue(PropertyNames.TITLE));
TestCase.assertTrue(extension.hasValues(PropertyNames.TITLE));
TestCase.assertEquals(1, extension.numValues(PropertyNames.TITLE));
TestCase.assertEquals(1, extension.numValues());
TestCase.assertEquals(name, extension.getValue(PropertyNames.TITLE));
TestCase.assertTrue(extension.hasValue(PropertyNames.TITLE, name));
final String tag = "TAG";
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 1));
TestCase.assertEquals(2, extension.numProperties());
TestCase.assertEquals(2, extension.getProperties().size());
TestCase.assertEquals(2, extension.numValues());
TestCase.assertEquals(1, extension.getValues(PropertyNames.TAG).size());
TestCase.assertTrue(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(1, extension.numValues(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + 1));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 2));
TestCase.assertEquals(2, extension.numProperties());
TestCase.assertEquals(2, extension.getProperties().size());
TestCase.assertEquals(3, extension.numValues());
TestCase.assertEquals(2, extension.getValues(PropertyNames.TAG).size());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(2, extension.numValues(PropertyNames.TAG));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + 2));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 3));
TestCase.assertTrue(extension.addValue(PropertyNames.TAG, tag + 4));
TestCase.assertFalse(extension.addValue(PropertyNames.TAG, tag + 4));
Set<String> values = new HashSet<>(
extension.getValues(PropertyNames.TAG));
for (int i = 1; i <= 4; i++) {
TestCase.assertTrue(values.contains(tag + i));
TestCase.assertTrue(extension.hasValue(PropertyNames.TAG, tag + i));
}
TestCase.assertEquals(1,
extension.deleteValue(PropertyNames.TAG, tag + 3));
TestCase.assertEquals(3, extension.getValues(PropertyNames.TAG).size());
TestCase.assertEquals(3, extension.numValues(PropertyNames.TAG));
TestCase.assertFalse(extension.hasValue(PropertyNames.TAG, tag + 3));
TestCase.assertEquals(3, extension.deleteProperty(PropertyNames.TAG));
TestCase.assertEquals(1, extension.numProperties());
TestCase.assertEquals(1, extension.getProperties().size());
TestCase.assertEquals(1, extension.numValues());
TestCase.assertTrue(extension.getValues(PropertyNames.TAG).isEmpty());
TestCase.assertFalse(extension.hasSingleValue(PropertyNames.TAG));
TestCase.assertFalse(extension.hasValues(PropertyNames.TAG));
TestCase.assertEquals(0, extension.numValues(PropertyNames.TAG));
extension.removeExtension();
TestCase.assertFalse(extension.has());
TestCase.assertFalse(geoPackage.isTable(PropertiesExtension.TABLE_NAME));
}
}
| property name tests
| src/test/java/mil/nga/geopackage/test/extension/properties/PropertiesExtensionTest.java | property name tests | <ide><path>rc/test/java/mil/nga/geopackage/test/extension/properties/PropertiesExtensionTest.java
<ide> package mil.nga.geopackage.test.extension.properties;
<ide>
<ide> import java.util.HashSet;
<add>import java.util.List;
<ide> import java.util.Set;
<add>import java.util.UUID;
<ide>
<ide> import junit.framework.TestCase;
<ide> import mil.nga.geopackage.extension.Extensions;
<ide>
<ide> }
<ide>
<add> /**
<add> * Test property names
<add> */
<add> @Test
<add> public void testPropertyNames() {
<add>
<add> PropertiesExtension extension = new PropertiesExtension(geoPackage);
<add> extension.getOrCreate();
<add>
<add> int count = 0;
<add>
<add> count += testPropertyName(extension, PropertyNames.CONTRIBUTOR);
<add> count += testPropertyName(extension, PropertyNames.COVERAGE);
<add> count += testPropertyName(extension, PropertyNames.CREATED);
<add> count += testPropertyName(extension, PropertyNames.CREATOR);
<add> count += testPropertyName(extension, PropertyNames.DATE);
<add> count += testPropertyName(extension, PropertyNames.DESCRIPTION);
<add> count += testPropertyName(extension, PropertyNames.IDENTIFIER);
<add> count += testPropertyName(extension, PropertyNames.LICENSE);
<add> count += testPropertyName(extension, PropertyNames.MODIFIED);
<add> count += testPropertyName(extension, PropertyNames.PUBLISHER);
<add> count += testPropertyName(extension, PropertyNames.REFERENCES);
<add> count += testPropertyName(extension, PropertyNames.RELATION);
<add> count += testPropertyName(extension, PropertyNames.SOURCE);
<add> count += testPropertyName(extension, PropertyNames.SPATIAL);
<add> count += testPropertyName(extension, PropertyNames.SUBJECT);
<add> count += testPropertyName(extension, PropertyNames.TAG);
<add> count += testPropertyName(extension, PropertyNames.TEMPORAL);
<add> count += testPropertyName(extension, PropertyNames.TITLE);
<add> count += testPropertyName(extension, PropertyNames.TYPE);
<add> count += testPropertyName(extension, PropertyNames.URI);
<add> count += testPropertyName(extension, PropertyNames.VALID);
<add> count += testPropertyName(extension, PropertyNames.VERSION);
<add>
<add> TestCase.assertEquals(22, extension.numProperties());
<add> TestCase.assertEquals(count, extension.numValues());
<add>
<add> TestCase.assertEquals(count, extension.deleteAll());
<add>
<add> TestCase.assertEquals(0, extension.numProperties());
<add> TestCase.assertEquals(0, extension.numValues());
<add>
<add> extension.removeExtension();
<add> TestCase.assertFalse(extension.has());
<add> }
<add>
<add> private int testPropertyName(PropertiesExtension extension, String property) {
<add>
<add> TestCase.assertFalse(extension.hasProperty(property));
<add>
<add> int count = 1;
<add> if (Math.random() < .5) {
<add> count = 1 + (int) (10 * Math.random());
<add> }
<add>
<add> Set<String> values = new HashSet<>();
<add> for (int i = 0; i < count; i++) {
<add> String value = UUID.randomUUID().toString();
<add> values.add(value);
<add> extension.addValue(property, value);
<add> }
<add>
<add> TestCase.assertTrue(extension.hasProperty(property));
<add> TestCase.assertEquals(count, extension.numValues(property));
<add> TestCase.assertEquals(count == 1, extension.hasSingleValue(property));
<add> TestCase.assertTrue(extension.hasValues(property));
<add>
<add> List<String> propertyValues = extension.getValues(property);
<add> TestCase.assertEquals(values.size(), propertyValues.size());
<add> for (String value : propertyValues) {
<add> TestCase.assertTrue(values.contains(value));
<add> TestCase.assertTrue(extension.hasValue(property, value));
<add> }
<add>
<add> return count;
<add> }
<add>
<ide> } |
|
Java | apache-2.0 | 509284ece36c1ccf07dd72e7cfad2a832f307f76 | 0 | markwoon/pathvisio,markwoon/pathvisio,PathVisio/pathvisio,PathVisio/pathvisio,markwoon/pathvisio,PathVisio/pathvisio,PathVisio/pathvisio,markwoon/pathvisio | // PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2009 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.view;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.RoundRectangle2D;
/**
* This defines and registers some
* Special Shapes that are very specific to GenMAPP,
* such as the GenMAPP ProteinComplex, Vesicle and Ribosome.
*
* Shapes are defined and registered in the static section of this class.
*/
class GenMAPPShapes
{
static void registerShapes()
{
ShapeRegistry.registerShape ("Pentagon", getRegularPolygon (5, 10, 10) );
ShapeRegistry.registerShape ("Hexagon", getRegularPolygon (6, 10, 10) );
ShapeRegistry.registerShape ("Triangle", getRegularPolygon (3, 10, 10) );
ShapeRegistry.registerShape ("Mitochondria", getPluggableShape (Internal.MITOCHONDRIA));
ShapeRegistry.registerShape ("Sarcoplasmic Reticulum", getPluggableShape (Internal.SARCOPLASMICRETICULUM));
ShapeRegistry.registerShape ("Endoplasmic Reticulum", getPluggableShape (Internal.ENDOPLASMICRETICULUM));
ShapeRegistry.registerShape ("Golgi Apparatus", getPluggableShape (Internal.GOLGIAPPARATUS));
ShapeRegistry.registerShape ("OrganA", getPluggableShape (Internal.ORGANA));
ShapeRegistry.registerShape ("OrganB", getPluggableShape (Internal.ORGANB));
ShapeRegistry.registerShape ("OrganC", getPluggableShape (Internal.ORGANC));
ShapeRegistry.registerShape ("CellA", getPluggableShape (Internal.CELLA));
ShapeRegistry.registerShape ("Ribosome", getPluggableShape (Internal.RIBOSOME));
ShapeRegistry.registerShape ("ProteinComplex", getPluggableShape (Internal.PROTEINB));
ShapeRegistry.registerShape ("Cell", getCombinedShape (Internal.CELL));
ShapeRegistry.registerShape ("Nucleus", getCombinedShape (Internal.NUCLEUS));
// ShapeRegistry.registerShape ("Mitochondria", getCombinedShape (Internal.MITOCHONDRIA));
ShapeRegistry.registerShape ("Organelle", getCombinedShape (Internal.ORGANELLE));
ShapeRegistry.registerShape ("Vesicle", getCombinedShape (Internal.VESICLE));
}
/**
these constants are internal, only for the switch statement below.
There is no relation with the constants defined in ShapeType.
*/
private enum Internal
{
MITOCHONDRIA,
SARCOPLASMICRETICULUM,
ENDOPLASMICRETICULUM,
GOLGIAPPARATUS,
@Deprecated ORGANA,
@Deprecated ORGANB,
@Deprecated ORGANC,
@Deprecated CELLA,
@Deprecated RIBOSOME,
@Deprecated PROTEINB,
@Deprecated CELL,
@Deprecated NUCLEUS,
@Deprecated ORGANELLE,
@Deprecated VESICLE;
}
/**
Internal,
Only for general shape types that can be described as a path.
The shapes are constructed as a general path with arbitrary size
and then resized to fit w and h parameters.
*/
static private java.awt.Shape getPluggableShape (Internal st)
{
GeneralPath path = new GeneralPath();
switch (st)
{
case ORGANA:
path.moveTo (33, 30);
path.curveTo (33, 46, 26, 60, 17, 60);
path.curveTo (8, 60, 0, 46, 0, 30);
path.curveTo (0, 14, 8, 0, 17, 0);
path.curveTo (26, 0, 33, 14, 33, 30);
path.closePath();
break;
case CELLA:
path.moveTo (44, 140);
path.curveTo (38, 158, 28, 169, 21, 165);
path.curveTo (15, 161, 14, 143, 20, 126);
path.curveTo (26, 108, 36, 97, 43, 101);
path.curveTo (49, 105, 50, 123, 44, 140);
path.closePath();
path.moveTo (64, 109);
path.curveTo (49, 162, 27, 202, 13, 198);
path.curveTo (0, 193, 1, 147, 16, 93);
path.curveTo (31, 40, 54, 0, 67, 5);
path.curveTo (80, 9, 79, 56, 64, 109);
path.closePath();
break;
case ORGANC:
path.moveTo (105.00f, 0.44f);
path.curveTo (47.56f, 0.44f, 0.34f, 44.59f, 0.34f, 99.38f);
path.curveTo (0.34f, 154.16f, 47.56f, 198.28f, 105.00f, 198.28f);
path.curveTo (162.44f, 198.28f, 209.66f, 154.16f, 209.66f, 99.38f);
path.curveTo (209.66f, 44.59f, 162.44f, 0.44f, 105.00f, 0.44f);
path.closePath();
path.moveTo (105.00f, 15.44f);
path.curveTo (154.80f, 15.44f, 194.66f, 53.22f, 194.66f, 99.38f);
path.curveTo (194.66f, 145.53f, 154.80f, 183.28f, 105.00f, 183.28f);
path.curveTo (55.20f, 183.28f, 15.34f, 145.53f, 15.34f, 99.38f);
path.curveTo (15.34f, 53.22f, 55.20f, 15.44f, 105.00f, 15.44f);
path.closePath();
break;
case ORGANB:
path.moveTo (15, 281);
path.curveTo (6, 254, 0, 199, 0, 156);
path.curveTo (0, 113, 6, 49, 15, 21);
path.curveTo (15, 12, 26, 1, 38, 1);
path.curveTo (49, 1, 60, 11, 60, 23);
path.curveTo (59, 36, 50, 46, 32, 44);
path.curveTo (23, 71, 23, 102, 23, 144);
path.curveTo (23, 188, 23, 227, 32, 254);
path.curveTo (50, 254, 60, 265, 60, 278);
path.curveTo (60, 290, 46, 300, 36, 300);
path.curveTo (27, 300, 15, 289, 15, 281);
path.closePath();
break;
case RIBOSOME:
path.moveTo (23.97f, 0.47f);
path.curveTo (19.30f, 0.47f, 15.22f, 5.18f, 13.03f, 12.16f);
path.curveTo (11.68f, 10.52f, 10.06f, 9.53f, 8.28f, 9.53f);
path.curveTo (3.71f, 9.53f, -0.00f, 15.90f, 0.00f, 23.75f);
path.curveTo (0.00f, 31.60f, 3.71f, 37.97f, 8.28f, 37.97f);
path.curveTo (10.02f, 37.97f, 11.64f, 37.04f, 12.97f, 35.47f);
path.curveTo (15.14f, 42.57f, 19.25f, 47.38f, 23.97f, 47.38f);
path.curveTo (30.95f, 47.38f, 36.63f, 36.85f, 36.63f, 23.91f);
path.curveTo (36.63f, 10.96f, 30.95f, 0.47f, 23.97f, 0.47f);
path.closePath();
break;
case PROTEINB:
path.moveTo (35.22f, 1.03f);
path.curveTo (28.17f, 1.34f, 21.64f, 5.70f, 18.19f, 11.78f);
path.curveTo (7.92f, 13.45f, 0.25f, 23.46f, 0.47f, 33.72f);
path.curveTo (0.27f, 43.64f, 7.43f, 53.33f, 17.25f, 55.40f);
path.curveTo (21.43f, 63.78f, 31.55f, 68.86f, 40.71f, 66.31f);
path.curveTo (46.39f, 64.88f, 51.27f, 60.86f, 54.06f, 55.75f);
path.curveTo (64.33f, 54.31f, 72.18f, 44.49f, 72.18f, 34.27f);
path.curveTo (72.63f, 24.01f, 65.17f, 13.84f, 54.94f, 11.93f);
path.curveTo (52.33f, 8.95f, 49.65f, 5.12f, 45.70f, 3.35f);
path.curveTo (42.49f, 1.64f, 38.84f, 0.89f, 35.22f, 1.03f);
path.closePath();
path.moveTo (35.41f, 4.53f);
path.curveTo (43.73f, 3.99f, 51.43f, 10.33f, 53.37f, 18.29f);
path.curveTo (54.52f, 22.42f, 54.39f, 27.08f, 52.34f, 30.90f);
path.curveTo (47.43f, 24.06f, 37.85f, 20.57f, 29.78f, 23.34f);
path.curveTo (25.94f, 24.54f, 22.47f, 26.87f, 19.87f, 29.94f);
path.curveTo (15.56f, 19.96f, 22.24f, 6.85f, 33.04f, 4.83f);
path.curveTo (33.82f, 4.67f, 34.61f, 4.57f, 35.41f, 4.53f);
path.closePath();
path.moveTo (16.34f, 15.78f);
path.curveTo (14.45f, 21.42f, 14.57f, 28.02f, 17.50f, 33.37f);
path.curveTo (14.54f, 38.79f, 13.51f, 45.40f, 15.56f, 51.31f);
path.curveTo (6.44f, 47.89f, 1.80f, 36.75f, 4.90f, 27.69f);
path.curveTo (6.60f, 22.24f, 10.89f, 17.59f, 16.34f, 15.78f);
path.closePath();
path.moveTo (56.28f, 15.81f);
path.curveTo (65.68f, 18.80f, 70.76f, 29.93f, 67.94f, 39.17f);
path.curveTo (66.32f, 45.03f, 61.68f, 50.04f, 55.81f, 51.78f);
path.curveTo (57.00f, 48.33f, 57.35f, 44.62f, 56.61f, 41.03f);
path.curveTo (56.22f, 37.40f, 53.29f, 34.25f, 56.26f, 30.98f);
path.curveTo (58.07f, 26.12f, 57.96f, 20.69f, 56.28f, 15.81f);
path.closePath();
path.moveTo (35.81f, 25.90f);
path.curveTo (43.76f, 25.85f, 50.93f, 31.93f, 52.77f, 39.57f);
path.curveTo (55.25f, 48.14f, 51.03f, 58.27f, 42.73f, 61.92f);
path.curveTo (35.28f, 65.52f, 25.73f, 62.48f, 21.37f, 55.55f);
path.curveTo (15.48f, 47.23f, 17.70f, 34.43f, 26.28f, 28.84f);
path.curveTo (29.08f, 26.94f, 32.44f, 25.90f, 35.81f, 25.90f);
path.closePath();
break;
case VESICLE:
path.moveTo (33, 30);
path.curveTo (33, 46, 26, 60, 17, 60);
path.curveTo (8, 60, 0, 46, 0, 30);
path.curveTo (0, 14, 8, 0, 17, 0);
path.curveTo (26, 0, 33, 14, 33, 30);
path.closePath();
break;
case MITOCHONDRIA:
path.moveTo (72.81f, 85.70f);
path.curveTo (97.59f, 83.01f, 94.55f, 147.38f, 119.28f, 144.29f);
path.curveTo (166.27f, 144.40f, 136.22f, 42.38f, 175.51f, 41.70f);
path.curveTo (215.08f, 41.02f, 188.27f, 150.12f, 227.79f, 148.28f);
path.curveTo (271.14f, 146.25f, 230.67f, 29.04f, 274.00f, 26.55f);
path.curveTo (317.72f, 24.05f, 290.58f, 142.55f, 334.36f, 143.22f);
path.curveTo (371.55f, 143.80f, 351.55f, 43.14f, 388.66f, 45.75f);
path.curveTo (429.51f, 48.62f, 392.43f, 153.80f, 432.85f, 160.40f);
path.curveTo (459.82f, 164.80f, 457.96f, 94.30f, 485.13f, 97.26f);
path.curveTo (548.33f, 124.69f, 534.13f, 233.75f, 472.75f, 258.89f);
path.curveTo (454.92f, 261.42f, 450.22f, 220.87f, 432.35f, 223.03f);
path.curveTo (400.60f, 226.86f, 409.73f, 303.71f, 377.80f, 301.95f);
path.curveTo (348.05f, 300.30f, 365.16f, 223.61f, 335.37f, 223.28f);
path.curveTo (295.83f, 222.85f, 316.30f, 327.99f, 276.78f, 326.44f);
path.curveTo (241.90f, 325.08f, 266.95f, 236.11f, 232.34f, 231.61f);
path.curveTo (200.07f, 227.42f, 201.79f, 311.88f, 169.71f, 306.49f);
path.curveTo (134.22f, 300.53f, 167.04f, 209.92f, 131.32f, 205.60f);
path.curveTo (110.14f, 203.04f, 116.28f, 257.74f, 94.95f, 258.26f);
path.curveTo (15.35f, 236.77f, 5.51f, 114.51f, 72.81f, 85.70f);
path.closePath();
path.moveTo (272.82f, 0.84f);
path.curveTo (378.97f, 1.13f, 542.51f, 62.39f, 543.54f, 168.53f);
path.curveTo (544.58f, 275.18f, 381.50f, 342.19f, 274.84f, 342.28f);
path.curveTo (166.69f, 342.36f, 0.84f, 274.66f, 2.10f, 166.51f);
path.curveTo (3.33f, 60.72f, 167.03f, 0.56f, 272.82f, 0.84f);
path.closePath();
break;
case SARCOPLASMICRETICULUM:
path.moveTo (118.53f, 16.63f);
path.curveTo (34.13f, 22.00f, 23.84f , 107.76f, 49.44f , 169.22f);
path.curveTo (73.73f, 242.63f, 0.51f , 289.88f, 56.13f , 366.83f);
path.curveTo (99.99f, 419.32f, 176.93f , 391.26f, 192.04f , 332.54f);
path.curveTo (207.42f, 271.52f, 163.49f , 228.38f, 183.45f , 168.61f);
path.curveTo (211.75f, 89.03f, 181.43f , 16.01f, 118.53f , 16.63f);
path.lineTo (118.53f, 16.63f);
path.closePath();
break;
case ENDOPLASMICRETICULUM:
path.moveTo (115.62f, 170.76f);
path.curveTo (106.85f, 115.66f, 152.29f , 74.72f, 152.11f , 37.31f);
path.curveTo (151.57f, 22.91f, 135.75f , 10.96f, 123.59f , 21.51f);
path.curveTo (97.02f, 44.83f, 99.19f , 108.29f, 90.52f , 146.58f);
path.curveTo (89.97f, 157.27f, 79.04f , 153.89f, 78.44f , 145.14f);
path.curveTo (69.32f, 111.41f, 105.16f , 72.62f, 87.74f , 58.00f);
path.curveTo (57.12f, 33.80f, 42.90f , 120.64f, 53.32f , 143.34f);
path.curveTo (65.01f, 185.32f, 49.93f , 215.62f, 42.80f , 189.23f);
path.curveTo (39.00f, 173.52f, 52.26f , 156.40f, 41.55f , 141.32f);
path.curveTo (34.82f, 133.03f, 23.22f , 139.41f, 16.36f , 150.49f);
path.curveTo (0.00f, 182.29f, 23.74f , 271.85f, 49.05f , 257.53f);
path.curveTo (56.38f, 251.73f, 44.01f , 231.76f, 55.14f , 229.10f);
path.curveTo (66.52f, 226.70f, 63.22f , 247.43f, 67.13f , 256.43f);
path.curveTo (70.73f, 268.42f, 74.67f , 281.17f, 83.91f , 290.85f);
path.curveTo (91.38f, 298.36f, 107.76f , 297.10f, 110.06f , 285.05f);
path.curveTo (113.23f, 257.62f, 69.35f , 201.07f, 93.40f , 192.41f);
path.curveTo (122.33f, 184.37f, 100.80f , 263.03f, 131.30f , 280.35f);
path.curveTo (146.12f, 286.36f, 155.69f , 278.51f, 154.40f , 268.41f);
path.curveTo (150.12f, 235.05f, 115.21f , 201.24f, 115.47f , 170.24f);
path.lineTo (115.62f, 170.76f);
path.closePath();
break;
case GOLGIAPPARATUS:
path.moveTo (148.89f, 77.62f);
path.curveTo (100.07f, 3.50f, 234.06f , 7.65f, 207.78f , 62.66f);
path.curveTo (187.00f, 106.50f, 171.09f , 190.54f, 209.13f , 287.47f);
path.curveTo (240.55f, 351.33f, 111.35f , 353.69f, 144.36f , 284.72f);
path.curveTo (171.13f, 215.31f, 165.77f , 107.32f, 148.89f , 77.62f);
path.lineTo (148.89f, 77.62f);
path.closePath();
path.moveTo (88.16f, 91.24f);
path.curveTo (62.70f, 40.69f, 158.70f , 44.41f, 131.59f , 92.83f);
path.curveTo (116.28f, 128.91f, 117.95f , 238.10f, 134.33f , 269.85f);
path.curveTo (154.45f, 313.72f, 56.82f , 315.51f, 85.96f , 264.54f);
path.curveTo (102.37f, 223.58f, 110.67f , 141.16f, 88.16f , 91.24f);
path.lineTo (88.16f, 91.24f);
path.closePath();
path.moveTo (83.40f, 133.15f);
path.curveTo (86.43f, 160.23f, 86.72f , 203.15f, 82.05f , 220.09f);
path.curveTo (73.24f, 250.74f, 69.98f , 262.93f, 50.80f , 265.89f);
path.curveTo (32.17f, 265.52f, 22.80f , 242.80f, 39.49f , 227.87f);
path.curveTo (50.94f, 214.61f, 53.98f , 202.20f, 55.20f , 173.72f);
path.curveTo (54.63f, 152.16f, 56.07f , 133.57f, 43.25f , 126.63f);
path.curveTo (25.26f, 121.45f, 30.31f , 86.90f, 56.06f , 93.20f);
path.curveTo (69.86f, 95.63f, 79.23f , 109.03f, 83.40f , 133.15f);
path.lineTo (83.40f, 133.15f);
path.closePath();
break;
}
return path;
}
/**
* Internal,
* For shape types composed of multiple basic shapes.
*
* NOTE: These are all being deprecated. They should be
* automatically converted to semantic-free shapes.
*/
static private java.awt.Shape getCombinedShape (Internal st)
{
Area area = new Area();
switch (st)
{
case CELL:
RoundRectangle2D.Double c1 = new RoundRectangle2D.Double(0,0,600,600,100, 100);
RoundRectangle2D.Double c2 = new RoundRectangle2D.Double(11,11,578,578,100, 100);
area.add(new Area(c1));
area.exclusiveOr(new Area(c2));
break;
case NUCLEUS:
Ellipse2D.Double n1 = new Ellipse2D.Double (0, 0, 300, 200);
Ellipse2D.Double n2 = new Ellipse2D.Double (8, 8, 284, 184);
area.add(new Area(n1));
area.exclusiveOr(new Area(n2));
break;
// case MITOCHONDRIA:
// RoundRectangle2D.Double m1 = new RoundRectangle2D.Double (0, 0, 200, 100, 40, 60);
// Ellipse2D.Double m2 = new Ellipse2D.Double (4, 4, 192, 92);
// area.add(new Area(m1));
// area.exclusiveOr(new Area(m2));
// break;
case ORGANELLE:
RoundRectangle2D.Double g1 = new RoundRectangle2D.Double(0,0,200,100,40, 60);
RoundRectangle2D.Double g2 = new RoundRectangle2D.Double(8,8,184,84,40, 60);
area.add(new Area(g1));
area.exclusiveOr(new Area(g2));
break;
case VESICLE:
Ellipse2D.Double v1 = new Ellipse2D.Double (0, 0, 100, 100);
area.add(new Area(v1));
break;
}
return area;
}
static private java.awt.Shape getRegularPolygon (int sides, double w, double h)
{
GeneralPath path = new GeneralPath();
for (int i = 0; i < sides; ++i)
{
double angle = Math.PI * 2 * i / sides;
double x = (w/2) * (1 + Math.cos (angle));
double y = (h/2) * (1 + Math.sin (angle));
if (i == 0)
{
path.moveTo ((float)x, (float)y);
}
else
{
path.lineTo ((float)x, (float)y);
}
}
path.closePath();
return path;
}
} | src/core/org/pathvisio/view/GenMAPPShapes.java | // PathVisio,
// a tool for data visualization and analysis using Biological Pathways
// Copyright 2006-2009 BiGCaT Bioinformatics
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.pathvisio.view;
import java.awt.geom.Area;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.RoundRectangle2D;
/**
* This defines and registers some
* Special Shapes that are very specific to GenMAPP,
* such as the GenMAPP ProteinComplex, Vesicle and Ribosome.
*
* Shapes are defined and registered in the static section of this class.
*/
class GenMAPPShapes
{
static void registerShapes()
{
ShapeRegistry.registerShape ("Pentagon", getRegularPolygon (5, 10, 10) );
ShapeRegistry.registerShape ("Hexagon", getRegularPolygon (6, 10, 10) );
ShapeRegistry.registerShape ("Triangle", getRegularPolygon (3, 10, 10) );
ShapeRegistry.registerShape ("Mitochondria", getPluggableShape (Internal.MITOCHONDRIA));
ShapeRegistry.registerShape ("Sarcoplasmic Reticulum", getPluggableShape (Internal.SARCOPLASMICRETICULUM));
ShapeRegistry.registerShape ("Endoplasmic Reticulum", getPluggableShape (Internal.ENDOPLASMICRETICULUM));
ShapeRegistry.registerShape ("Golgi Apparatus", getPluggableShape (Internal.GOLGIAPPARATUS));
ShapeRegistry.registerShape ("OrganA", getPluggableShape (Internal.ORGANA));
ShapeRegistry.registerShape ("OrganB", getPluggableShape (Internal.ORGANB));
ShapeRegistry.registerShape ("OrganC", getPluggableShape (Internal.ORGANC));
ShapeRegistry.registerShape ("CellA", getPluggableShape (Internal.CELLA));
ShapeRegistry.registerShape ("Ribosome", getPluggableShape (Internal.RIBOSOME));
ShapeRegistry.registerShape ("ProteinComplex", getPluggableShape (Internal.PROTEINB));
ShapeRegistry.registerShape ("Cell", getCombinedShape (Internal.CELL));
ShapeRegistry.registerShape ("Nucleus", getCombinedShape (Internal.NUCLEUS));
// ShapeRegistry.registerShape ("Mitochondria", getCombinedShape (Internal.MITOCHONDRIA));
ShapeRegistry.registerShape ("Organelle", getCombinedShape (Internal.ORGANELLE));
ShapeRegistry.registerShape ("Vesicle", getCombinedShape (Internal.VESICLE));
}
/**
these constants are internal, only for the switch statement below.
There is no relation with the constants defined in ShapeType.
*/
private enum Internal
{
MITOCHONDRIA,
SARCOPLASMICRETICULUM,
ENDOPLASMICRETICULUM,
GOLGIAPPARATUS,
@Deprecated ORGANA,
@Deprecated ORGANB,
@Deprecated ORGANC,
@Deprecated CELLA,
@Deprecated RIBOSOME,
@Deprecated PROTEINB,
@Deprecated CELL,
@Deprecated NUCLEUS,
@Deprecated ORGANELLE,
@Deprecated VESICLE;
}
/**
Internal,
Only for general shape types that can be described as a path.
The shapes are constructed as a general path with arbitrary size
and then resized to fit w and h parameters.
*/
static private java.awt.Shape getPluggableShape (Internal st)
{
GeneralPath path = new GeneralPath();
switch (st)
{
case ORGANA:
path.moveTo (33, 30);
path.curveTo (33, 46, 26, 60, 17, 60);
path.curveTo (8, 60, 0, 46, 0, 30);
path.curveTo (0, 14, 8, 0, 17, 0);
path.curveTo (26, 0, 33, 14, 33, 30);
path.closePath();
break;
case CELLA:
path.moveTo (44, 140);
path.curveTo (38, 158, 28, 169, 21, 165);
path.curveTo (15, 161, 14, 143, 20, 126);
path.curveTo (26, 108, 36, 97, 43, 101);
path.curveTo (49, 105, 50, 123, 44, 140);
path.closePath();
path.moveTo (64, 109);
path.curveTo (49, 162, 27, 202, 13, 198);
path.curveTo (0, 193, 1, 147, 16, 93);
path.curveTo (31, 40, 54, 0, 67, 5);
path.curveTo (80, 9, 79, 56, 64, 109);
path.closePath();
break;
case ORGANC:
path.moveTo (105.00f, 0.44f);
path.curveTo (47.56f, 0.44f, 0.34f, 44.59f, 0.34f, 99.38f);
path.curveTo (0.34f, 154.16f, 47.56f, 198.28f, 105.00f, 198.28f);
path.curveTo (162.44f, 198.28f, 209.66f, 154.16f, 209.66f, 99.38f);
path.curveTo (209.66f, 44.59f, 162.44f, 0.44f, 105.00f, 0.44f);
path.closePath();
path.moveTo (105.00f, 15.44f);
path.curveTo (154.80f, 15.44f, 194.66f, 53.22f, 194.66f, 99.38f);
path.curveTo (194.66f, 145.53f, 154.80f, 183.28f, 105.00f, 183.28f);
path.curveTo (55.20f, 183.28f, 15.34f, 145.53f, 15.34f, 99.38f);
path.curveTo (15.34f, 53.22f, 55.20f, 15.44f, 105.00f, 15.44f);
path.closePath();
break;
case ORGANB:
path.moveTo (15, 281);
path.curveTo (6, 254, 0, 199, 0, 156);
path.curveTo (0, 113, 6, 49, 15, 21);
path.curveTo (15, 12, 26, 1, 38, 1);
path.curveTo (49, 1, 60, 11, 60, 23);
path.curveTo (59, 36, 50, 46, 32, 44);
path.curveTo (23, 71, 23, 102, 23, 144);
path.curveTo (23, 188, 23, 227, 32, 254);
path.curveTo (50, 254, 60, 265, 60, 278);
path.curveTo (60, 290, 46, 300, 36, 300);
path.curveTo (27, 300, 15, 289, 15, 281);
path.closePath();
break;
case RIBOSOME:
path.moveTo (23.97f, 0.47f);
path.curveTo (19.30f, 0.47f, 15.22f, 5.18f, 13.03f, 12.16f);
path.curveTo (11.68f, 10.52f, 10.06f, 9.53f, 8.28f, 9.53f);
path.curveTo (3.71f, 9.53f, -0.00f, 15.90f, 0.00f, 23.75f);
path.curveTo (0.00f, 31.60f, 3.71f, 37.97f, 8.28f, 37.97f);
path.curveTo (10.02f, 37.97f, 11.64f, 37.04f, 12.97f, 35.47f);
path.curveTo (15.14f, 42.57f, 19.25f, 47.38f, 23.97f, 47.38f);
path.curveTo (30.95f, 47.38f, 36.63f, 36.85f, 36.63f, 23.91f);
path.curveTo (36.63f, 10.96f, 30.95f, 0.47f, 23.97f, 0.47f);
path.closePath();
break;
case PROTEINB:
path.moveTo (35.22f, 1.03f);
path.curveTo (28.17f, 1.34f, 21.64f, 5.70f, 18.19f, 11.78f);
path.curveTo (7.92f, 13.45f, 0.25f, 23.46f, 0.47f, 33.72f);
path.curveTo (0.27f, 43.64f, 7.43f, 53.33f, 17.25f, 55.40f);
path.curveTo (21.43f, 63.78f, 31.55f, 68.86f, 40.71f, 66.31f);
path.curveTo (46.39f, 64.88f, 51.27f, 60.86f, 54.06f, 55.75f);
path.curveTo (64.33f, 54.31f, 72.18f, 44.49f, 72.18f, 34.27f);
path.curveTo (72.63f, 24.01f, 65.17f, 13.84f, 54.94f, 11.93f);
path.curveTo (52.33f, 8.95f, 49.65f, 5.12f, 45.70f, 3.35f);
path.curveTo (42.49f, 1.64f, 38.84f, 0.89f, 35.22f, 1.03f);
path.closePath();
path.moveTo (35.41f, 4.53f);
path.curveTo (43.73f, 3.99f, 51.43f, 10.33f, 53.37f, 18.29f);
path.curveTo (54.52f, 22.42f, 54.39f, 27.08f, 52.34f, 30.90f);
path.curveTo (47.43f, 24.06f, 37.85f, 20.57f, 29.78f, 23.34f);
path.curveTo (25.94f, 24.54f, 22.47f, 26.87f, 19.87f, 29.94f);
path.curveTo (15.56f, 19.96f, 22.24f, 6.85f, 33.04f, 4.83f);
path.curveTo (33.82f, 4.67f, 34.61f, 4.57f, 35.41f, 4.53f);
path.closePath();
path.moveTo (16.34f, 15.78f);
path.curveTo (14.45f, 21.42f, 14.57f, 28.02f, 17.50f, 33.37f);
path.curveTo (14.54f, 38.79f, 13.51f, 45.40f, 15.56f, 51.31f);
path.curveTo (6.44f, 47.89f, 1.80f, 36.75f, 4.90f, 27.69f);
path.curveTo (6.60f, 22.24f, 10.89f, 17.59f, 16.34f, 15.78f);
path.closePath();
path.moveTo (56.28f, 15.81f);
path.curveTo (65.68f, 18.80f, 70.76f, 29.93f, 67.94f, 39.17f);
path.curveTo (66.32f, 45.03f, 61.68f, 50.04f, 55.81f, 51.78f);
path.curveTo (57.00f, 48.33f, 57.35f, 44.62f, 56.61f, 41.03f);
path.curveTo (56.22f, 37.40f, 53.29f, 34.25f, 56.26f, 30.98f);
path.curveTo (58.07f, 26.12f, 57.96f, 20.69f, 56.28f, 15.81f);
path.closePath();
path.moveTo (35.81f, 25.90f);
path.curveTo (43.76f, 25.85f, 50.93f, 31.93f, 52.77f, 39.57f);
path.curveTo (55.25f, 48.14f, 51.03f, 58.27f, 42.73f, 61.92f);
path.curveTo (35.28f, 65.52f, 25.73f, 62.48f, 21.37f, 55.55f);
path.curveTo (15.48f, 47.23f, 17.70f, 34.43f, 26.28f, 28.84f);
path.curveTo (29.08f, 26.94f, 32.44f, 25.90f, 35.81f, 25.90f);
path.closePath();
break;
case VESICLE:
path.moveTo (33, 30);
path.curveTo (33, 46, 26, 60, 17, 60);
path.curveTo (8, 60, 0, 46, 0, 30);
path.curveTo (0, 14, 8, 0, 17, 0);
path.curveTo (26, 0, 33, 14, 33, 30);
path.closePath();
break;
case MITOCHONDRIA:
path.moveTo (72.81f, 85.70f);
path.curveTo (97.59f, 83.01f, 94.55f, 147.38f, 119.28f, 144.29f);
path.curveTo (166.27f, 144.40f, 136.22f, 42.38f, 175.51f, 41.70f);
path.curveTo (215.08f, 41.02f, 188.27f, 150.12f, 227.79f, 148.28f);
path.curveTo (271.14f, 146.25f, 230.67f, 29.04f, 274.00f, 26.55f);
path.curveTo (317.72f, 24.05f, 290.58f, 142.55f, 334.36f, 143.22f);
path.curveTo (371.55f, 143.80f, 351.55f, 43.14f, 388.66f, 45.75f);
path.curveTo (429.51f, 48.62f, 392.43f, 153.80f, 432.85f, 160.40f);
path.curveTo (459.82f, 164.80f, 457.96f, 94.30f, 485.13f, 97.26f);
path.curveTo (548.33f, 124.69f, 534.13f, 233.75f, 472.75f, 258.89f);
path.curveTo (454.92f, 261.42f, 450.22f, 220.87f, 432.35f, 223.03f);
path.curveTo (400.60f, 226.86f, 409.73f, 303.71f, 377.80f, 301.95f);
path.curveTo (348.05f, 300.30f, 365.16f, 223.61f, 335.37f, 223.28f);
path.curveTo (295.83f, 222.85f, 316.30f, 327.99f, 276.78f, 326.44f);
path.curveTo (241.90f, 325.08f, 266.95f, 236.11f, 232.34f, 231.61f);
path.curveTo (200.07f, 227.42f, 201.79f, 311.88f, 169.71f, 306.49f);
path.curveTo (134.22f, 300.53f, 167.04f, 209.92f, 131.32f, 205.60f);
path.curveTo (110.14f, 203.04f, 116.28f, 257.74f, 94.95f, 258.26f);
path.curveTo (15.35f, 236.77f, 5.51f, 114.51f, 72.81f, 85.70f);
path.closePath();
path.moveTo (272.82f, 0.84f);
path.curveTo (378.97f, 1.13f, 542.51f, 62.39f, 543.54f, 168.53f);
path.curveTo (544.58f, 275.18f, 381.50f, 342.19f, 274.84f, 342.28f);
path.curveTo (166.69f, 342.36f, 0.84f, 274.66f, 2.10f, 166.51f);
path.curveTo (3.33f, 60.72f, 167.03f, 0.56f, 272.82f, 0.84f);
path.closePath();
break;
case SARCOPLASMICRETICULUM:
path.moveTo (83.84f, 11.06f);
path.curveTo (55.38f, 11.53f, 34.05f , 35.28f, 27.55f ,
59.42f);
path.curveTo (21.34f, 79.07f, 24.23f , 100.01f, 31.18f ,
119.12f);
path.curveTo (36.92f, 139.13f, 39.24f , 152.44f, 29.02f ,
171.33f);
path.curveTo (17.17f, 196.63f, 16.73f , 237.34f, 36.28f ,
259.06f);
path.curveTo (51.15f, 276.87f, 78.30f , 283.82f, 101.03f ,
276.18f);
path.curveTo (121.32f, 270.59f, 135.08f , 253.36f, 139.86f
, 234.78f);
path.curveTo (144.60f, 217.86f, 141.76f , 191.06f, 133.71f
, 174.98f);
path.curveTo (125.19f, 156.63f, 126.34f , 137.76f, 133.32f
, 118.69f);
path.curveTo (142.15f, 94.73f, 147.49f , 74.12f, 138.15f ,
49.55f);
path.curveTo (131.47f, 29.83f, 112.90f , 13.08f, 90.17f ,
11.34f);
path.curveTo (88.07f, 11.13f, 85.95f , 11.04f, 83.84f ,
11.06f);
path.lineTo (83.84f, 11.06f);
path.closePath();
break;
case ENDOPLASMICRETICULUM:
path.moveTo (117.65f, 173.00f);
path.curveTo (111.77f, 135.92f, 125.48f , 98.47f, 146.23f
, 66.69f);
path.curveTo (151.89f, 55.92f, 160.75f , 45.46f, 160.84f ,
33.16f);
path.curveTo (160.30f, 18.31f, 138.48f , 9.17f, 125.67f ,
19.03f);
path.curveTo (114.97f, 28.67f, 106.07f , 40.84f, 102.50f ,
54.17f);
path.curveTo (96.72f, 85.23f, 99.58f , 117.17f, 92.40f ,
148.05f);
path.curveTo (91.84f, 159.08f, 80.85f , 155.60f, 80.24f ,
146.57f);
path.curveTo (77.74f, 135.64f, 78.88f , 124.33f, 78.47f ,
113.28f);
path.curveTo (80.31f, 94.13f, 96.96f , 76.32f, 89.59f ,
56.67f);
path.curveTo (87.26f, 44.36f, 70.19f , 43.28f, 62.75f ,
52.40f);
path.curveTo (50.92f, 61.79f, 46.77f , 75.95f, 46.31f ,
89.59f);
path.curveTo (45.11f, 108.24f, 46.91f , 127.30f, 54.97f ,
144.71f);
path.curveTo (76.04f, 186.21f, 51.55f , 219.27f, 44.39f ,
192.05f);
path.curveTo (37.12f, 175.84f, 53.90f , 158.19f, 43.13f ,
142.63f);
path.curveTo (33.49f, 136.38f, 21.53f , 143.47f, 17.78f ,
152.08f);
path.curveTo (10.80f, 165.14f, 12.97f , 180.15f, 11.50f ,
194.16f);
path.curveTo (10.56f, 215.22f, 14.93f , 236.91f, 27.06f ,
255.12f);
path.curveTo (31.46f, 261.52f, 43.73f , 274.00f, 51.82f ,
262.00f);
path.curveTo (56.61f, 253.46f, 46.74f , 235.42f, 57.95f ,
232.67f);
path.curveTo (69.97f, 234.28f, 64.92f , 252.09f, 68.86f ,
261.37f);
path.curveTo (72.48f, 273.75f, 76.44f , 286.90f, 85.75f ,
296.88f);
path.curveTo (93.26f, 304.63f, 109.74f , 303.34f, 112.06f
, 290.90f);
path.curveTo (113.01f, 281.64f, 108.20f , 273.22f, 102.95f
, 264.08f);
path.curveTo (99.06f, 255.85f, 95.56f , 246.93f, 94.32f ,
237.86f);
path.curveTo (90.75f, 223.98f, 82.55f , 201.60f, 95.29f ,
195.34f);
path.curveTo (109.69f, 194.21f, 112.63f , 209.12f, 111.48f
, 218.85f);
path.curveTo (111.42f, 229.31f, 112.01f , 239.46f, 114.31f
, 250.45f);
path.curveTo (115.12f, 263.89f, 119.84f , 278.60f, 133.43f
, 286.05f);
path.curveTo (148.33f, 292.25f, 158.77f , 284.87f, 156.66f
, 273.73f);
path.curveTo (154.44f, 264.05f, 148.78f , 255.16f, 144.56f
, 245.97f);
path.curveTo (137.70f, 233.33f, 129.80f , 220.91f, 126.99f
, 206.95f);
path.curveTo (123.04f, 195.67f, 116.81f , 184.58f, 117.50f
, 172.46f);
path.lineTo (117.65f, 173.00f);
path.closePath();
break;
case GOLGIAPPARATUS:
path.moveTo (127.96f, 49.53f);
path.curveTo (121.66f, 31.27f, 138.80f , 7.66f, 156.31f ,
9.53f);
path.curveTo (173.25f, 7.39f, 191.42f , 27.16f, 188.92f ,
46.11f);
path.curveTo (186.48f, 61.46f, 179.96f , 70.14f, 178.51f ,
85.78f);
path.curveTo (177.21f, 106.34f, 175.10f , 125.37f, 174.43f
, 146.77f);
path.curveTo (174.05f, 177.43f, 171.66f , 209.04f, 176.42f
, 239.53f);
path.curveTo (180.30f, 258.62f, 191.10f , 275.62f, 192.45f
, 295.81f);
path.curveTo (192.54f, 311.66f, 182.11f , 325.50f, 166.59f
, 324.58f);
path.curveTo (152.14f, 325.43f, 133.44f , 325.03f, 126.49f
, 309.03f);
path.curveTo (121.02f, 295.74f, 125.62f , 281.07f, 131.05f
, 267.86f);
path.curveTo (136.98f, 249.17f, 140.34f , 231.09f, 143.04f
, 212.10f);
path.curveTo (146.20f, 178.60f, 144.97f , 149.47f, 144.04f
, 115.92f);
path.curveTo (142.28f, 93.04f, 135.66f , 70.93f, 127.96f ,
49.53f);
path.lineTo (127.96f, 49.53f);
path.closePath();
path.moveTo (77.73f, 59.23f);
path.curveTo (82.35f, 48.65f, 94.32f , 42.79f, 105.18f ,
45.62f);
path.curveTo (113.70f, 47.00f, 119.21f , 52.21f, 122.69f ,
61.49f);
path.curveTo (124.37f, 78.66f, 115.43f , 93.85f, 114.27f ,
110.08f);
path.curveTo (110.14f, 136.19f, 112.49f , 161.25f, 113.20f
, 185.45f);
path.curveTo (115.01f, 204.12f, 116.77f , 217.45f, 122.31f
, 232.89f);
path.curveTo (125.70f, 241.82f, 126.02f , 246.57f, 127.21f
, 254.37f);
path.curveTo (127.05f, 264.38f, 124.03f , 270.98f, 115.53f
, 276.21f);
path.curveTo (103.11f, 280.30f, 93.64f , 277.95f, 83.72f ,
270.17f);
path.curveTo (77.55f, 261.61f, 77.64f , 258.92f, 78.22f ,
247.63f);
path.curveTo (78.86f, 241.37f, 80.77f , 237.00f, 83.86f ,
227.01f);
path.curveTo (89.30f, 212.69f, 87.64f , 201.97f, 88.23f ,
186.83f);
path.curveTo (88.38f, 166.69f, 87.57f , 132.05f, 86.93f ,
111.58f);
path.curveTo (83.54f, 83.34f, 74.05f , 78.83f, 77.73f ,
59.23f);
path.lineTo (77.73f, 59.23f);
path.closePath();
path.moveTo (48.72f, 76.28f);
path.curveTo (56.17f, 78.12f, 61.95f , 84.59f, 66.18f ,
90.83f);
path.curveTo (70.48f, 98.11f, 73.15f , 105.72f, 75.01f ,
113.81f);
path.curveTo (76.93f, 121.92f, 76.68f , 130.38f, 77.54f ,
138.66f);
path.curveTo (78.48f, 156.39f, 78.11f , 174.59f, 72.59f ,
191.57f);
path.curveTo (70.55f, 198.47f, 66.87f , 205.48f, 64.48f ,
212.26f);
path.curveTo (60.37f, 220.77f, 55.64f , 228.48f, 49.21f ,
235.35f);
path.curveTo (42.93f, 240.92f, 33.51f , 241.52f, 25.97f ,
238.69f);
path.curveTo (16.43f, 235.33f, 12.78f , 222.68f, 16.90f ,
213.66f);
path.curveTo (19.81f, 205.01f, 23.19f , 198.84f, 33.22f ,
191.68f);
path.curveTo (41.60f, 182.17f, 42.95f , 177.91f, 44.24f ,
169.15f);
path.curveTo (45.39f, 158.97f, 44.29f , 149.60f, 43.45f ,
137.43f);
path.curveTo (42.26f, 127.62f, 40.71f , 118.07f, 30.98f ,
110.34f);
path.curveTo (24.67f, 103.68f, 20.99f , 97.09f, 22.55f ,
88.91f);
path.curveTo (25.77f, 75.28f, 36.38f , 74.84f, 43.84f ,
75.16f);
path.curveTo (45.65f, 75.20f, 47.53f , 75.46f, 49.08f ,
76.49f);
path.lineTo (48.72f, 76.28f);
path.closePath();
break;
}
return path;
}
/**
 * Internal helper for shape types composed of multiple basic shapes.
*
* NOTE: These are all being deprecated. They should be
* automatically converted to semantic-free shapes.
*/
static private java.awt.Shape getCombinedShape (Internal st)
{
Area area = new Area();
switch (st)
{
case CELL:
RoundRectangle2D.Double c1 = new RoundRectangle2D.Double(0,0,600,600,100, 100);
RoundRectangle2D.Double c2 = new RoundRectangle2D.Double(11,11,578,578,100, 100);
area.add(new Area(c1));
area.exclusiveOr(new Area(c2));
break;
case NUCLEUS:
Ellipse2D.Double n1 = new Ellipse2D.Double (0, 0, 300, 200);
Ellipse2D.Double n2 = new Ellipse2D.Double (8, 8, 284, 184);
area.add(new Area(n1));
area.exclusiveOr(new Area(n2));
break;
// case MITOCHONDRIA:
// RoundRectangle2D.Double m1 = new RoundRectangle2D.Double (0, 0, 200, 100, 40, 60);
// Ellipse2D.Double m2 = new Ellipse2D.Double (4, 4, 192, 92);
// area.add(new Area(m1));
// area.exclusiveOr(new Area(m2));
// break;
case ORGANELLE:
RoundRectangle2D.Double g1 = new RoundRectangle2D.Double(0,0,200,100,40, 60);
RoundRectangle2D.Double g2 = new RoundRectangle2D.Double(8,8,184,84,40, 60);
area.add(new Area(g1));
area.exclusiveOr(new Area(g2));
break;
case VESICLE:
Ellipse2D.Double v1 = new Ellipse2D.Double (0, 0, 100, 100);
area.add(new Area(v1));
break;
}
return area;
}
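    /*
     * Illustration of the composition trick used above: the outer primitive is added
     * first and an inset copy is then exclusiveOr'ed away, which leaves only the rim
     * between the two outlines -- that is how the double-walled CELL and NUCLEUS
     * shapes are built. A minimal sketch (the method name and the sizes below are
     * only an example, not one of the GenMAPP shapes):
     */
    static private java.awt.Shape getRingExample ()
    {
        Area ring = new Area();
        Ellipse2D.Double outer = new Ellipse2D.Double (0, 0, 100, 100);
        Ellipse2D.Double inner = new Ellipse2D.Double (8, 8, 84, 84);
        ring.add(new Area(outer));          // fill the outer ellipse
        ring.exclusiveOr(new Area(inner));  // punch out the inset copy, keeping only the rim
        return ring;
    }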
static private java.awt.Shape getRegularPolygon (int sides, double w, double h)
{
GeneralPath path = new GeneralPath();
for (int i = 0; i < sides; ++i)
{
double angle = Math.PI * 2 * i / sides;
double x = (w/2) * (1 + Math.cos (angle));
double y = (h/2) * (1 + Math.sin (angle));
if (i == 0)
{
path.moveTo ((float)x, (float)y);
}
else
{
path.lineTo ((float)x, (float)y);
}
}
path.closePath();
return path;
}
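    /*
     * Worked example for getRegularPolygon: with sides = 4 and w = h = 100 the loop
     * visits the angles 0, PI/2, PI and 3*PI/2 and produces the vertices (100,50),
     * (50,100), (0,50) and (50,0) -- a diamond inscribed in the 100x100 bounding box.
     * The first vertex always lies on the right-hand edge, so the polygon comes out
     * rotated compared to a flat-top orientation.
     */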
} | Golgi, ER and SR shapes updated, now with fewer waypoints. | src/core/org/pathvisio/view/GenMAPPShapes.java | Golgi, ER and SR shapes updated, now with fewer waypoints. | <ide><path>rc/core/org/pathvisio/view/GenMAPPShapes.java
<ide> path.closePath();
<ide> break;
<ide> case SARCOPLASMICRETICULUM:
<del> path.moveTo (83.84f, 11.06f);
<del> path.curveTo (55.38f, 11.53f, 34.05f , 35.28f, 27.55f ,
<del> 59.42f);
<del> path.curveTo (21.34f, 79.07f, 24.23f , 100.01f, 31.18f ,
<del> 119.12f);
<del> path.curveTo (36.92f, 139.13f, 39.24f , 152.44f, 29.02f ,
<del> 171.33f);
<del> path.curveTo (17.17f, 196.63f, 16.73f , 237.34f, 36.28f ,
<del> 259.06f);
<del> path.curveTo (51.15f, 276.87f, 78.30f , 283.82f, 101.03f ,
<del> 276.18f);
<del> path.curveTo (121.32f, 270.59f, 135.08f , 253.36f, 139.86f
<del> , 234.78f);
<del> path.curveTo (144.60f, 217.86f, 141.76f , 191.06f, 133.71f
<del> , 174.98f);
<del> path.curveTo (125.19f, 156.63f, 126.34f , 137.76f, 133.32f
<del> , 118.69f);
<del> path.curveTo (142.15f, 94.73f, 147.49f , 74.12f, 138.15f ,
<del> 49.55f);
<del> path.curveTo (131.47f, 29.83f, 112.90f , 13.08f, 90.17f ,
<del> 11.34f);
<del> path.curveTo (88.07f, 11.13f, 85.95f , 11.04f, 83.84f ,
<del> 11.06f);
<del> path.lineTo (83.84f, 11.06f);
<add> path.moveTo (118.53f, 16.63f);
<add> path.curveTo (34.13f, 22.00f, 23.84f , 107.76f, 49.44f , 169.22f);
<add> path.curveTo (73.73f, 242.63f, 0.51f , 289.88f, 56.13f , 366.83f);
<add> path.curveTo (99.99f, 419.32f, 176.93f , 391.26f, 192.04f , 332.54f);
<add> path.curveTo (207.42f, 271.52f, 163.49f , 228.38f, 183.45f , 168.61f);
<add> path.curveTo (211.75f, 89.03f, 181.43f , 16.01f, 118.53f , 16.63f);
<add> path.lineTo (118.53f, 16.63f);
<ide> path.closePath();
<ide> break;
<ide> case ENDOPLASMICRETICULUM:
<del> path.moveTo (117.65f, 173.00f);
<del> path.curveTo (111.77f, 135.92f, 125.48f , 98.47f, 146.23f
<del> , 66.69f);
<del> path.curveTo (151.89f, 55.92f, 160.75f , 45.46f, 160.84f ,
<del> 33.16f);
<del> path.curveTo (160.30f, 18.31f, 138.48f , 9.17f, 125.67f ,
<del> 19.03f);
<del> path.curveTo (114.97f, 28.67f, 106.07f , 40.84f, 102.50f ,
<del> 54.17f);
<del> path.curveTo (96.72f, 85.23f, 99.58f , 117.17f, 92.40f ,
<del> 148.05f);
<del> path.curveTo (91.84f, 159.08f, 80.85f , 155.60f, 80.24f ,
<del> 146.57f);
<del> path.curveTo (77.74f, 135.64f, 78.88f , 124.33f, 78.47f ,
<del> 113.28f);
<del> path.curveTo (80.31f, 94.13f, 96.96f , 76.32f, 89.59f ,
<del> 56.67f);
<del> path.curveTo (87.26f, 44.36f, 70.19f , 43.28f, 62.75f ,
<del> 52.40f);
<del> path.curveTo (50.92f, 61.79f, 46.77f , 75.95f, 46.31f ,
<del> 89.59f);
<del> path.curveTo (45.11f, 108.24f, 46.91f , 127.30f, 54.97f ,
<del> 144.71f);
<del> path.curveTo (76.04f, 186.21f, 51.55f , 219.27f, 44.39f ,
<del> 192.05f);
<del> path.curveTo (37.12f, 175.84f, 53.90f , 158.19f, 43.13f ,
<del> 142.63f);
<del> path.curveTo (33.49f, 136.38f, 21.53f , 143.47f, 17.78f ,
<del> 152.08f);
<del> path.curveTo (10.80f, 165.14f, 12.97f , 180.15f, 11.50f ,
<del> 194.16f);
<del> path.curveTo (10.56f, 215.22f, 14.93f , 236.91f, 27.06f ,
<del> 255.12f);
<del> path.curveTo (31.46f, 261.52f, 43.73f , 274.00f, 51.82f ,
<del> 262.00f);
<del> path.curveTo (56.61f, 253.46f, 46.74f , 235.42f, 57.95f ,
<del> 232.67f);
<del> path.curveTo (69.97f, 234.28f, 64.92f , 252.09f, 68.86f ,
<del> 261.37f);
<del> path.curveTo (72.48f, 273.75f, 76.44f , 286.90f, 85.75f ,
<del> 296.88f);
<del> path.curveTo (93.26f, 304.63f, 109.74f , 303.34f, 112.06f
<del> , 290.90f);
<del> path.curveTo (113.01f, 281.64f, 108.20f , 273.22f, 102.95f
<del> , 264.08f);
<del> path.curveTo (99.06f, 255.85f, 95.56f , 246.93f, 94.32f ,
<del> 237.86f);
<del> path.curveTo (90.75f, 223.98f, 82.55f , 201.60f, 95.29f ,
<del> 195.34f);
<del> path.curveTo (109.69f, 194.21f, 112.63f , 209.12f, 111.48f
<del> , 218.85f);
<del> path.curveTo (111.42f, 229.31f, 112.01f , 239.46f, 114.31f
<del> , 250.45f);
<del> path.curveTo (115.12f, 263.89f, 119.84f , 278.60f, 133.43f
<del> , 286.05f);
<del> path.curveTo (148.33f, 292.25f, 158.77f , 284.87f, 156.66f
<del> , 273.73f);
<del> path.curveTo (154.44f, 264.05f, 148.78f , 255.16f, 144.56f
<del> , 245.97f);
<del> path.curveTo (137.70f, 233.33f, 129.80f , 220.91f, 126.99f
<del> , 206.95f);
<del> path.curveTo (123.04f, 195.67f, 116.81f , 184.58f, 117.50f
<del> , 172.46f);
<del> path.lineTo (117.65f, 173.00f);
<add> path.moveTo (115.62f, 170.76f);
<add> path.curveTo (106.85f, 115.66f, 152.29f , 74.72f, 152.11f , 37.31f);
<add> path.curveTo (151.57f, 22.91f, 135.75f , 10.96f, 123.59f , 21.51f);
<add> path.curveTo (97.02f, 44.83f, 99.19f , 108.29f, 90.52f , 146.58f);
<add> path.curveTo (89.97f, 157.27f, 79.04f , 153.89f, 78.44f , 145.14f);
<add> path.curveTo (69.32f, 111.41f, 105.16f , 72.62f, 87.74f , 58.00f);
<add> path.curveTo (57.12f, 33.80f, 42.90f , 120.64f, 53.32f , 143.34f);
<add> path.curveTo (65.01f, 185.32f, 49.93f , 215.62f, 42.80f , 189.23f);
<add> path.curveTo (39.00f, 173.52f, 52.26f , 156.40f, 41.55f , 141.32f);
<add> path.curveTo (34.82f, 133.03f, 23.22f , 139.41f, 16.36f , 150.49f);
<add> path.curveTo (0.00f, 182.29f, 23.74f , 271.85f, 49.05f , 257.53f);
<add> path.curveTo (56.38f, 251.73f, 44.01f , 231.76f, 55.14f , 229.10f);
<add> path.curveTo (66.52f, 226.70f, 63.22f , 247.43f, 67.13f , 256.43f);
<add> path.curveTo (70.73f, 268.42f, 74.67f , 281.17f, 83.91f , 290.85f);
<add> path.curveTo (91.38f, 298.36f, 107.76f , 297.10f, 110.06f , 285.05f);
<add> path.curveTo (113.23f, 257.62f, 69.35f , 201.07f, 93.40f , 192.41f);
<add> path.curveTo (122.33f, 184.37f, 100.80f , 263.03f, 131.30f , 280.35f);
<add> path.curveTo (146.12f, 286.36f, 155.69f , 278.51f, 154.40f , 268.41f);
<add> path.curveTo (150.12f, 235.05f, 115.21f , 201.24f, 115.47f , 170.24f);
<add> path.lineTo (115.62f, 170.76f);
<ide> path.closePath();
<ide> break;
<ide> case GOLGIAPPARATUS:
<del> path.moveTo (127.96f, 49.53f);
<del> path.curveTo (121.66f, 31.27f, 138.80f , 7.66f, 156.31f ,
<del> 9.53f);
<del> path.curveTo (173.25f, 7.39f, 191.42f , 27.16f, 188.92f ,
<del> 46.11f);
<del> path.curveTo (186.48f, 61.46f, 179.96f , 70.14f, 178.51f ,
<del> 85.78f);
<del> path.curveTo (177.21f, 106.34f, 175.10f , 125.37f, 174.43f
<del> , 146.77f);
<del> path.curveTo (174.05f, 177.43f, 171.66f , 209.04f, 176.42f
<del> , 239.53f);
<del> path.curveTo (180.30f, 258.62f, 191.10f , 275.62f, 192.45f
<del> , 295.81f);
<del> path.curveTo (192.54f, 311.66f, 182.11f , 325.50f, 166.59f
<del> , 324.58f);
<del> path.curveTo (152.14f, 325.43f, 133.44f , 325.03f, 126.49f
<del> , 309.03f);
<del> path.curveTo (121.02f, 295.74f, 125.62f , 281.07f, 131.05f
<del> , 267.86f);
<del> path.curveTo (136.98f, 249.17f, 140.34f , 231.09f, 143.04f
<del> , 212.10f);
<del> path.curveTo (146.20f, 178.60f, 144.97f , 149.47f, 144.04f
<del> , 115.92f);
<del> path.curveTo (142.28f, 93.04f, 135.66f , 70.93f, 127.96f ,
<del> 49.53f);
<del> path.lineTo (127.96f, 49.53f);
<del> path.closePath();
<del> path.moveTo (77.73f, 59.23f);
<del> path.curveTo (82.35f, 48.65f, 94.32f , 42.79f, 105.18f ,
<del> 45.62f);
<del> path.curveTo (113.70f, 47.00f, 119.21f , 52.21f, 122.69f ,
<del> 61.49f);
<del> path.curveTo (124.37f, 78.66f, 115.43f , 93.85f, 114.27f ,
<del> 110.08f);
<del> path.curveTo (110.14f, 136.19f, 112.49f , 161.25f, 113.20f
<del> , 185.45f);
<del> path.curveTo (115.01f, 204.12f, 116.77f , 217.45f, 122.31f
<del> , 232.89f);
<del> path.curveTo (125.70f, 241.82f, 126.02f , 246.57f, 127.21f
<del> , 254.37f);
<del> path.curveTo (127.05f, 264.38f, 124.03f , 270.98f, 115.53f
<del> , 276.21f);
<del> path.curveTo (103.11f, 280.30f, 93.64f , 277.95f, 83.72f ,
<del> 270.17f);
<del> path.curveTo (77.55f, 261.61f, 77.64f , 258.92f, 78.22f ,
<del> 247.63f);
<del> path.curveTo (78.86f, 241.37f, 80.77f , 237.00f, 83.86f ,
<del> 227.01f);
<del> path.curveTo (89.30f, 212.69f, 87.64f , 201.97f, 88.23f ,
<del> 186.83f);
<del> path.curveTo (88.38f, 166.69f, 87.57f , 132.05f, 86.93f ,
<del> 111.58f);
<del> path.curveTo (83.54f, 83.34f, 74.05f , 78.83f, 77.73f ,
<del> 59.23f);
<del> path.lineTo (77.73f, 59.23f);
<del> path.closePath();
<del> path.moveTo (48.72f, 76.28f);
<del> path.curveTo (56.17f, 78.12f, 61.95f , 84.59f, 66.18f ,
<del> 90.83f);
<del> path.curveTo (70.48f, 98.11f, 73.15f , 105.72f, 75.01f ,
<del> 113.81f);
<del> path.curveTo (76.93f, 121.92f, 76.68f , 130.38f, 77.54f ,
<del> 138.66f);
<del> path.curveTo (78.48f, 156.39f, 78.11f , 174.59f, 72.59f ,
<del> 191.57f);
<del> path.curveTo (70.55f, 198.47f, 66.87f , 205.48f, 64.48f ,
<del> 212.26f);
<del> path.curveTo (60.37f, 220.77f, 55.64f , 228.48f, 49.21f ,
<del> 235.35f);
<del> path.curveTo (42.93f, 240.92f, 33.51f , 241.52f, 25.97f ,
<del> 238.69f);
<del> path.curveTo (16.43f, 235.33f, 12.78f , 222.68f, 16.90f ,
<del> 213.66f);
<del> path.curveTo (19.81f, 205.01f, 23.19f , 198.84f, 33.22f ,
<del> 191.68f);
<del> path.curveTo (41.60f, 182.17f, 42.95f , 177.91f, 44.24f ,
<del> 169.15f);
<del> path.curveTo (45.39f, 158.97f, 44.29f , 149.60f, 43.45f ,
<del> 137.43f);
<del> path.curveTo (42.26f, 127.62f, 40.71f , 118.07f, 30.98f ,
<del> 110.34f);
<del> path.curveTo (24.67f, 103.68f, 20.99f , 97.09f, 22.55f ,
<del> 88.91f);
<del> path.curveTo (25.77f, 75.28f, 36.38f , 74.84f, 43.84f ,
<del> 75.16f);
<del> path.curveTo (45.65f, 75.20f, 47.53f , 75.46f, 49.08f ,
<del> 76.49f);
<del> path.lineTo (48.72f, 76.28f);
<add> path.moveTo (148.89f, 77.62f);
<add> path.curveTo (100.07f, 3.50f, 234.06f , 7.65f, 207.78f , 62.66f);
<add> path.curveTo (187.00f, 106.50f, 171.09f , 190.54f, 209.13f , 287.47f);
<add> path.curveTo (240.55f, 351.33f, 111.35f , 353.69f, 144.36f , 284.72f);
<add> path.curveTo (171.13f, 215.31f, 165.77f , 107.32f, 148.89f , 77.62f);
<add> path.lineTo (148.89f, 77.62f);
<add> path.closePath();
<add> path.moveTo (88.16f, 91.24f);
<add> path.curveTo (62.70f, 40.69f, 158.70f , 44.41f, 131.59f , 92.83f);
<add> path.curveTo (116.28f, 128.91f, 117.95f , 238.10f, 134.33f , 269.85f);
<add> path.curveTo (154.45f, 313.72f, 56.82f , 315.51f, 85.96f , 264.54f);
<add> path.curveTo (102.37f, 223.58f, 110.67f , 141.16f, 88.16f , 91.24f);
<add> path.lineTo (88.16f, 91.24f);
<add> path.closePath();
<add> path.moveTo (83.40f, 133.15f);
<add> path.curveTo (86.43f, 160.23f, 86.72f , 203.15f, 82.05f , 220.09f);
<add> path.curveTo (73.24f, 250.74f, 69.98f , 262.93f, 50.80f , 265.89f);
<add> path.curveTo (32.17f, 265.52f, 22.80f , 242.80f, 39.49f , 227.87f);
<add> path.curveTo (50.94f, 214.61f, 53.98f , 202.20f, 55.20f , 173.72f);
<add> path.curveTo (54.63f, 152.16f, 56.07f , 133.57f, 43.25f , 126.63f);
<add> path.curveTo (25.26f, 121.45f, 30.31f , 86.90f, 56.06f , 93.20f);
<add> path.curveTo (69.86f, 95.63f, 79.23f , 109.03f, 83.40f , 133.15f);
<add> path.lineTo (83.40f, 133.15f);
<ide> path.closePath();
<ide> break;
<ide> } |
|
Java | mit | 69d5760da1f145c58520b6610595056aee27f45f | 0 | echohaha/XChatMessageView | package com.kaneki.xchatmessageview.base;
import android.content.Context;
import android.graphics.Color;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import com.kaneki.xchatmessageview.listener.OnLoadMoreListener;
import java.util.List;
/**
* @author yueqian
* @Description
* @date 2017/1/16
* @email [email protected]
*/
public class XChatMessageView<T> extends ViewGroup {
private static final String DEFAULT_BACKGROUND_COLOR = "#f5f5f5";
private Context context;
private RecyclerView recyclerView;
private LinearLayoutManager linearLayoutManager;
private XMessageAdapter messageAdpter;
private OnLoadMoreListener onLoadMoreListener;
private boolean isLoadMore = false;
private int lastPosition = 0;
private int lastOffset = 0;
public XChatMessageView(Context context) {
this(context, null);
}
public XChatMessageView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public XChatMessageView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
this.context = context;
initView();
initListener();
}
/**
* Measure the size of this view.
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int measureWidth = measureWidth(widthMeasureSpec);
int measureHeight = measureHeight(heightMeasureSpec);
// measure all child views of this custom ViewGroup
measureChildren(widthMeasureSpec, heightMeasureSpec);
// set the measured size of this custom ViewGroup
setMeasuredDimension(measureWidth, measureHeight);
}
private int measureWidth(int pWidthMeasureSpec) {
int result = 0;
int widthMode = MeasureSpec.getMode(pWidthMeasureSpec);// extract the mode
int widthSize = MeasureSpec.getSize(pWidthMeasureSpec);// extract the size
switch (widthMode) {
/**
* The mode can be one of three values: MeasureSpec.UNSPECIFIED, MeasureSpec.EXACTLY
* and MeasureSpec.AT_MOST.
*
* MeasureSpec.EXACTLY means an exact size: when layout_width or layout_height is set
* to a concrete value such as android:layout_width="50dip", or to FILL_PARENT, the
* size of the view is already fixed, so the measurement is exact.
*
* MeasureSpec.AT_MOST means a maximum size: when layout_width or layout_height is set
* to WRAP_CONTENT, the size of the view usually depends on its children or content,
* and it only has to stay within the maximum size the parent allows. The mode is then
* AT_MOST and size carries that maximum allowed size.
*
* MeasureSpec.UNSPECIFIED means no size constraint was given. This is rare and usually
* happens when the parent is an AdapterView that passes this mode in through measure().
*/
case MeasureSpec.AT_MOST:
case MeasureSpec.UNSPECIFIED:
case MeasureSpec.EXACTLY:
result = widthSize;
break;
}
return result;
}
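    /*
     * A MeasureSpec packs the mode and the size into a single int, for example
     * int spec = MeasureSpec.makeMeasureSpec(480, MeasureSpec.EXACTLY); then
     * MeasureSpec.getMode(spec) returns EXACTLY and MeasureSpec.getSize(spec)
     * returns 480. measureWidth above and measureHeight below simply return the
     * size the parent passed in, whatever the mode is, so this view always takes
     * exactly the space it is offered.
     */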
private int measureHeight(int pHeightMeasureSpec) {
int result = 0;
int heightMode = MeasureSpec.getMode(pHeightMeasureSpec);
int heightSize = MeasureSpec.getSize(pHeightMeasureSpec);
switch (heightMode) {
case MeasureSpec.AT_MOST:
case MeasureSpec.UNSPECIFIED:
case MeasureSpec.EXACTLY:
result = heightSize;
break;
}
return result;
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
// running total of the children's height
int mTotalHeight = 0;
// iterate over all child views
int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
View childView = getChildAt(i);
// use the sizes calculated for this child in onMeasure
int measureHeight = childView.getMeasuredHeight();
int measuredWidth = childView.getMeasuredWidth();
childView.layout(l, mTotalHeight, measuredWidth, mTotalHeight
+ measureHeight);
mTotalHeight += measureHeight;
}
}
private void initView() {
recyclerView = new RecyclerView(context);
recyclerView.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.MATCH_PARENT));
recyclerView.setBackgroundColor(Color.parseColor(DEFAULT_BACKGROUND_COLOR));
linearLayoutManager = new LinearLayoutManager(context);
linearLayoutManager.setOrientation(LinearLayoutManager.VERTICAL);
recyclerView.setLayoutManager(linearLayoutManager);
addView(recyclerView);
}
private void initListener() {
recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
}
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
if (messageAdpter.isNeedLoadMore())
saveCurrent();
}
});
}
private void saveCurrent() {
int pos = linearLayoutManager.findFirstVisibleItemPosition();
if (pos == 0 && !isLoadMore) {
// the load-more header view (its height is added to the offset below)
View headerView = linearLayoutManager.getChildAt(0);
// the first visible content view below the header
View firstView = linearLayoutManager.getChildAt(1);
if (headerView != null && firstView != null) {
// offset of this view from the top, including the header height
lastOffset = firstView.getTop() + headerView.getHeight();
// adapter position of the header view
lastPosition = linearLayoutManager.getPosition(headerView);
isLoadMore = true;
onLoadMoreListener.onLoadMore();
}
} else {
// the view that is currently first visible
View currentView = linearLayoutManager.getChildAt(pos);
if (currentView != null) {
// offset of this view from the top
lastOffset = currentView.getTop();
// adapter position of this view
lastPosition = linearLayoutManager.getPosition(currentView);
}
}
}
private void resumeSave(int changeSize) {
linearLayoutManager.scrollToPositionWithOffset(lastPosition + changeSize, lastOffset);
}
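    /*
     * resumeSave() is the counterpart of saveCurrent(): after older messages have been
     * prepended, the item that used to be first visible has shifted down by changeSize
     * positions, so scrolling to lastPosition + changeSize with the saved pixel offset
     * keeps the list visually anchored on the same message instead of jumping around.
     */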
/**
* Set the message adapter; the adapter should extend XMessageAdapter.
* @param messageAdapter
*/
public void setMessageAdapter(XMessageAdapter messageAdapter) {
this.messageAdpter = messageAdapter;
recyclerView.setAdapter(messageAdapter);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Set the load-more listener; it is called when the header becomes visible and
* only fires once per trigger.
* @param onLoadMoreListener
*/
public void setOnLoadMoreListener(OnLoadMoreListener onLoadMoreListener) {
this.onLoadMoreListener = onLoadMoreListener;
}
/**
* Get the message adapter; it may return null if setMessageAdapter was called with null.
* @return
*/
public XMessageAdapter getMessageAdpter() {
return messageAdpter;
}
/**
* Toggle the load-more header; it should be called before the data changes.
* @param isNeedLoadMore
*/
public void setIsNeedLoadMore(boolean isNeedLoadMore) {
messageAdpter.setNeedLoadMore(isNeedLoadMore);
}
/**
* return the view's position on the XChatMessageView, the view should come from the XViewHolder.
* @param view
* @return
*/
public int getMessageItemPosition(View view) {
return linearLayoutManager == null ? -1 : linearLayoutManager.getPosition(view);
}
/**
* Add a new message at the end of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param t
*/
@SuppressWarnings("unchecked")
public void addMessageAtLast(T t) {
messageAdpter.addMessageAtLast(t);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Add messages at the end of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param tList
*/
@SuppressWarnings("unchecked")
public void addMoreMessageAtLast(List<T> tList) {
messageAdpter.addMoreMessageAtLast(tList);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Add messages at the front of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param tList
*/
@SuppressWarnings("unchecked")
public void addMoreMessageAtFirst(List<T> tList) {
messageAdpter.addMoreMessageAtFirst(tList);
if (!messageAdpter.isNeedLoadMore())
resumeSave(tList.size());
isLoadMore = false;
}
/**
* remove the message on the XChatMessageView, the view should come from the XViewHolder callback.
* @param view
*/
public void reomveMessage(View view) {
int pos = linearLayoutManager.getPosition(view);
messageAdpter.removeMessageAtPosition(pos);
}
public void scrollToBottom() {
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
public void saveCurrentStatus() {
saveCurrent();
}
public void resumeSaveStatus() {
resumeSave(0);
}
}
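/*
 * Typical usage, as a sketch only -- the adapter class, message type and ids here are
 * hypothetical, the view and listener types come from this library:
 *
 *   final XChatMessageView<ChatMessage> chatView =
 *           (XChatMessageView<ChatMessage>) findViewById(R.id.chat_view);
 *   chatView.setMessageAdapter(new MyMessageAdapter(messages));
 *   chatView.setIsNeedLoadMore(true);
 *   chatView.setOnLoadMoreListener(new OnLoadMoreListener() {
 *       @Override
 *       public void onLoadMore() {
 *           List<ChatMessage> older = loadOlderMessages();
 *           chatView.addMoreMessageAtFirst(older);
 *       }
 *   });
 */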
| library/src/main/java/com/kaneki/xchatmessageview/base/XChatMessageView.java | package com.kaneki.xchatmessageview.base;
import android.content.Context;
import android.graphics.Color;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import com.kaneki.xchatmessageview.listener.OnLoadMoreListener;
import java.util.List;
/**
* @author yueqian
* @Description
* @date 2017/1/16
* @email [email protected]
*/
public class XChatMessageView<T> extends ViewGroup {
private static final String DEFAULT_BACKGROUND_COLOR = "#f5f5f5";
private Context context;
private RecyclerView recyclerView;
private LinearLayoutManager linearLayoutManager;
private XMessageAdapter messageAdpter;
private OnLoadMoreListener onLoadMoreListener;
private boolean isLoadMore = false;
private int lastPosition = 0;
private int lastOffset = 0;
public XChatMessageView(Context context) {
this(context, null);
}
public XChatMessageView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public XChatMessageView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
this.context = context;
initView();
initListener();
}
/**
* Measure the size of this view.
*/
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int measureWidth = measureWidth(widthMeasureSpec);
int measureHeight = measureHeight(heightMeasureSpec);
// measure all child views of this custom ViewGroup
measureChildren(widthMeasureSpec, heightMeasureSpec);
// set the measured size of this custom ViewGroup
setMeasuredDimension(measureWidth, measureHeight);
}
private int measureWidth(int pWidthMeasureSpec) {
int result = 0;
int widthMode = MeasureSpec.getMode(pWidthMeasureSpec);// extract the mode
int widthSize = MeasureSpec.getSize(pWidthMeasureSpec);// extract the size
switch (widthMode) {
/**
* The mode can be one of three values: MeasureSpec.UNSPECIFIED, MeasureSpec.EXACTLY
* and MeasureSpec.AT_MOST.
*
* MeasureSpec.EXACTLY means an exact size: when layout_width or layout_height is set
* to a concrete value such as android:layout_width="50dip", or to FILL_PARENT, the
* size of the view is already fixed, so the measurement is exact.
*
* MeasureSpec.AT_MOST means a maximum size: when layout_width or layout_height is set
* to WRAP_CONTENT, the size of the view usually depends on its children or content,
* and it only has to stay within the maximum size the parent allows. The mode is then
* AT_MOST and size carries that maximum allowed size.
*
* MeasureSpec.UNSPECIFIED means no size constraint was given. This is rare and usually
* happens when the parent is an AdapterView that passes this mode in through measure().
*/
case MeasureSpec.AT_MOST:
case MeasureSpec.UNSPECIFIED:
case MeasureSpec.EXACTLY:
result = widthSize;
break;
}
return result;
}
private int measureHeight(int pHeightMeasureSpec) {
int result = 0;
int heightMode = MeasureSpec.getMode(pHeightMeasureSpec);
int heightSize = MeasureSpec.getSize(pHeightMeasureSpec);
switch (heightMode) {
case MeasureSpec.AT_MOST:
case MeasureSpec.UNSPECIFIED:
case MeasureSpec.EXACTLY:
result = heightSize;
break;
}
return result;
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
// running total of the children's height
int mTotalHeight = 0;
// iterate over all child views
int childCount = getChildCount();
for (int i = 0; i < childCount; i++) {
View childView = getChildAt(i);
// use the sizes calculated for this child in onMeasure
int measureHeight = childView.getMeasuredHeight();
int measuredWidth = childView.getMeasuredWidth();
childView.layout(l, mTotalHeight, measuredWidth, mTotalHeight
+ measureHeight);
mTotalHeight += measureHeight;
}
}
private void initView() {
recyclerView = new RecyclerView(context);
recyclerView.setLayoutParams(new RecyclerView.LayoutParams(RecyclerView.LayoutParams.MATCH_PARENT, RecyclerView.LayoutParams.MATCH_PARENT));
recyclerView.setBackgroundColor(Color.parseColor(DEFAULT_BACKGROUND_COLOR));
linearLayoutManager = new LinearLayoutManager(context);
linearLayoutManager.setOrientation(LinearLayoutManager.VERTICAL);
recyclerView.setLayoutManager(linearLayoutManager);
addView(recyclerView);
}
private void initListener() {
recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
@Override
public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
}
@Override
public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
if (messageAdpter.isNeedLoadMore())
saveCurrent();
}
});
}
private void saveCurrent() {
int pos = linearLayoutManager.findFirstVisibleItemPosition();
if (pos == 0 && !isLoadMore) {
// the load-more header view (its height is added to the offset below)
View headerView = linearLayoutManager.getChildAt(0);
// the first visible content view below the header
View firstView = linearLayoutManager.getChildAt(1);
if (headerView != null && firstView != null) {
// offset of this view from the top, including the header height
lastOffset = firstView.getTop() + headerView.getHeight();
// adapter position of the header view
lastPosition = linearLayoutManager.getPosition(headerView);
isLoadMore = true;
onLoadMoreListener.onLoadMore();
}
} else {
// the view that is currently first visible
View currentView = linearLayoutManager.getChildAt(pos);
if (currentView != null) {
// offset of this view from the top
lastOffset = currentView.getTop();
// adapter position of this view
lastPosition = linearLayoutManager.getPosition(currentView);
}
}
}
private void resumeSave(int changeSize) {
linearLayoutManager.scrollToPositionWithOffset(lastPosition + changeSize, lastOffset);
}
/**
* Set the message adapter; the adapter should extend XMessageAdapter.
* @param messageAdapter
*/
public void setMessageAdapter(XMessageAdapter messageAdapter) {
this.messageAdpter = messageAdapter;
recyclerView.setAdapter(messageAdapter);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Set the load-more listener; it is called when the header becomes visible and
* only fires once per trigger.
* @param onLoadMoreListener
*/
public void setOnLoadMoreListener(OnLoadMoreListener onLoadMoreListener) {
this.onLoadMoreListener = onLoadMoreListener;
}
/**
* Get the message adapter; it may return null if setMessageAdapter was called with null.
* @return
*/
public XMessageAdapter getMessageAdpter() {
return messageAdpter;
}
/**
* Toggle the load-more header; it should be called before the data changes.
* @param isNeedLoadMore
*/
public void setIsNeedLoadMore(boolean isNeedLoadMore) {
messageAdpter.setNeedLoadMore(isNeedLoadMore);
}
/**
* return the view's position on the XChatMessageView, the view should come from the XViewHolder.
* @param view
* @return
*/
public int getMessageItemPosition(View view) {
return linearLayoutManager == null ? -1 : linearLayoutManager.getPosition(view);
}
/**
* Add a new message at the end of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param t
*/
@SuppressWarnings("unchecked")
public void addMessageAtLast(T t) {
messageAdpter.addMessageAtLast(t);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Add messages at the end of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param tList
*/
@SuppressWarnings("unchecked")
public void addMoreMessageAtLast(List<T> tList) {
messageAdpter.addMoreMessageAtLast(tList);
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
/**
* Add messages at the front of the XChatMessageView; the message type should match the T of the
* XViewHolder or XMessageAdapter.
* @param tList
*/
@SuppressWarnings("unchecked")
public void addMoreMessageAtFirst(List<T> tList) {
messageAdpter.addMoreMessageAtFirst(tList);
if (!messageAdpter.isNeedLoadMore())
resumeSave(tList.size());
isLoadMore = false;
}
public void reomveMessage(View view) {
int pos = linearLayoutManager.getPosition(view);
messageAdpter.removeMessageAtPosition(pos);
}
public void scrollToBottom() {
recyclerView.scrollToPosition(messageAdpter.getItemCount() - 1);
}
public void saveCurrentStatus() {
saveCurrent();
}
public void resumeSaveStatus() {
resumeSave(0);
}
}
 | yueqian: add comments
 | library/src/main/java/com/kaneki/xchatmessageview/base/XChatMessageView.java | yueqian: add comments | <ide><path>ibrary/src/main/java/com/kaneki/xchatmessageview/base/XChatMessageView.java
<ide> isLoadMore = false;
<ide> }
<ide>
<add> /**
<add> * remove the message on the XChatMessageView, the view should come from the XViewHolder callback.
<add> * @param view
<add> */
<ide> public void reomveMessage(View view) {
<ide> int pos = linearLayoutManager.getPosition(view);
<ide> messageAdpter.removeMessageAtPosition(pos); |
|
Java | apache-2.0 | e26e9a00dbd1a56c9d578b381b5b547f8538fb4d | 0 | wcm-io-caravan/caravan-io,wcm-io-caravan/caravan-io,wcm-io-caravan/caravan-io | /*
* #%L
* wcm.io
* %%
* Copyright (C) 2015 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.caravan.io.http.impl.servletclient;
import io.wcm.caravan.io.http.response.CaravanHttpResponse;
import io.wcm.caravan.io.http.response.CaravanHttpResponseBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import com.google.common.base.Charsets;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
/**
 * {@link HttpServletResponse} implementation that buffers what a servlet writes and maps it into a {@link CaravanHttpResponse}.
*/
public class HttpServletResponseMapper implements HttpServletResponse {
private String characterEncoding = Charsets.UTF_8.toString();
private String contentType;
private ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
private int bufferSize = 4096;
private Locale locale;
private final List<Cookie> cookies = Lists.newArrayList();
private final Multimap<String, String> headers = HashMultimap.create();
private int status = HttpServletResponse.SC_OK;
private String reason = "OK";
@Override
public String getCharacterEncoding() {
return characterEncoding;
}
@Override
public String getContentType() {
return contentType;
}
@Override
public ServletOutputStream getOutputStream() throws IOException {
return new ServletOutputStream() {
@Override
public void write(int b) throws IOException {
outputStream.write(b);
}
};
}
@Override
public PrintWriter getWriter() throws IOException {
return new PrintWriter(outputStream);
}
@Override
public void setCharacterEncoding(String charset) {
characterEncoding = charset;
}
@Override
public void setContentLength(int len) {
setIntHeader("Content-Length", len);
// if the content length is known in advance then resize the ByteArrayOutputSteram accordingly
// (but only if nothing has yet been written to the output stream)
if (len > 0 && outputStream.size() == 0) {
outputStream = new ByteArrayOutputStream(len);
}
}
@Override
public void setContentType(String type) {
contentType = type;
}
@Override
public void setBufferSize(int size) {
bufferSize = size;
}
@Override
public int getBufferSize() {
return bufferSize;
}
@Override
public void flushBuffer() throws IOException {
outputStream.flush();
}
@Override
public void resetBuffer() {
outputStream.reset();
}
@Override
public boolean isCommitted() {
return false;
}
@Override
public void reset() {
resetBuffer();
this.characterEncoding = null;
this.contentType = null;
this.locale = null;
this.cookies.clear();
this.headers.clear();
this.status = HttpServletResponse.SC_OK;
this.reason = null;
}
@Override
public void setLocale(Locale loc) {
locale = loc;
}
@Override
public Locale getLocale() {
return locale;
}
@Override
public void addCookie(Cookie cookie) {
cookies.add(cookie);
}
@Override
public boolean containsHeader(String name) {
return headers.containsKey(name);
}
@Override
public String encodeURL(String url) {
return url;
}
@Override
public String encodeRedirectURL(String url) {
return encodeURL(url);
}
@Override
public String encodeUrl(String url) {
return encodeURL(url);
}
@Override
public String encodeRedirectUrl(String url) {
return encodeRedirectURL(url);
}
@Override
public void sendError(int sc, String msg) throws IOException {
status = sc;
reason = msg;
}
@Override
public void sendError(int sc) throws IOException {
status = sc;
}
@Override
public void sendRedirect(String location) throws IOException {
setHeader("Location", location);
setStatus(HttpServletResponse.SC_MOVED_TEMPORARILY);
}
@Override
public void setDateHeader(String name, long date) {
String value = formatDate(date);
setHeader(name, value);
}
private String formatDate(long date) {
SimpleDateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
return dateFormat.format(new Date(date));
}
@Override
public void addDateHeader(String name, long date) {
String value = formatDate(date);
addHeader(name, value);
}
@Override
public void setHeader(String name, String value) {
headers.replaceValues(name, Collections.singletonList(value));
}
@Override
public void addHeader(String name, String value) {
headers.put(name, value);
}
@Override
public void setIntHeader(String name, int value) {
setHeader(name, String.valueOf(value));
}
@Override
public void addIntHeader(String name, int value) {
addHeader(name, String.valueOf(value));
}
@Override
public void setStatus(int sc) {
status = sc;
}
@Override
public void setStatus(int sc, String sm) {
setStatus(sc);
reason = sm;
}
@Override
public int getStatus() {
return status;
}
@Override
public String getHeader(String name) {
Collection<String> values = headers.get(name);
return values.isEmpty() ? null : values.iterator().next();
}
@Override
public Collection<String> getHeaders(String name) {
return headers.get(name);
}
@Override
public Collection<String> getHeaderNames() {
return headers.keySet();
}
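  /**
   * Builds a {@link CaravanHttpResponse} from everything captured by this mapper so far:
   * the buffered body bytes, the collected headers and the current status code and
   * reason phrase.
   */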
public CaravanHttpResponse getResponse() {
return new CaravanHttpResponseBuilder()
.body(outputStream.toByteArray())
.headers(headers)
.reason(reason)
.status(status)
.build();
}
}
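/*
 * Sketch of the intended use when a servlet is called directly in the same JVM instead
 * of over HTTP (the servlet and request variables below are illustrative only):
 *
 *   HttpServletResponseMapper responseMapper = new HttpServletResponseMapper();
 *   servlet.service(request, responseMapper);
 *   CaravanHttpResponse response = responseMapper.getResponse();
 */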
| http/src/main/java/io/wcm/caravan/io/http/impl/servletclient/HttpServletResponseMapper.java | /*
* #%L
* wcm.io
* %%
* Copyright (C) 2015 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.caravan.io.http.impl.servletclient;
import io.wcm.caravan.io.http.response.CaravanHttpResponse;
import io.wcm.caravan.io.http.response.CaravanHttpResponseBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import com.google.common.base.Charsets;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
/**
 * {@link HttpServletResponse} implementation that buffers what a servlet writes and maps it into a {@link CaravanHttpResponse}.
*/
public class HttpServletResponseMapper implements HttpServletResponse {
private String characterEncoding = Charsets.UTF_8.toString();
private String contentType;
private ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
private int bufferSize = 4096;
private Locale locale;
private final List<Cookie> cookies = Lists.newArrayList();
private final Multimap<String, String> headers = HashMultimap.create();
private int status = HttpServletResponse.SC_OK;
private String reason = "OK";
@Override
public String getCharacterEncoding() {
return characterEncoding;
}
@Override
public String getContentType() {
return contentType;
}
@Override
public ServletOutputStream getOutputStream() throws IOException {
return new ServletOutputStream() {
@Override
public void write(int b) throws IOException {
outputStream.write(b);
}
};
}
@Override
public PrintWriter getWriter() throws IOException {
return new PrintWriter(outputStream);
}
@Override
public void setCharacterEncoding(String charset) {
characterEncoding = charset;
}
@Override
public void setContentLength(int len) {
setIntHeader("Content-Length", len);
}
@Override
public void setContentType(String type) {
contentType = type;
}
@Override
public void setBufferSize(int size) {
bufferSize = size;
}
@Override
public int getBufferSize() {
return bufferSize;
}
@Override
public void flushBuffer() throws IOException {
outputStream.flush();
}
@Override
public void resetBuffer() {
outputStream.reset();
}
@Override
public boolean isCommitted() {
return false;
}
@Override
public void reset() {
resetBuffer();
this.characterEncoding = null;
this.contentType = null;
this.locale = null;
this.cookies.clear();
this.headers.clear();
this.status = HttpServletResponse.SC_OK;
this.reason = null;
}
@Override
public void setLocale(Locale loc) {
locale = loc;
}
@Override
public Locale getLocale() {
return locale;
}
@Override
public void addCookie(Cookie cookie) {
cookies.add(cookie);
}
@Override
public boolean containsHeader(String name) {
return headers.containsKey(name);
}
@Override
public String encodeURL(String url) {
return url;
}
@Override
public String encodeRedirectURL(String url) {
return encodeURL(url);
}
@Override
public String encodeUrl(String url) {
return encodeURL(url);
}
@Override
public String encodeRedirectUrl(String url) {
return encodeRedirectURL(url);
}
@Override
public void sendError(int sc, String msg) throws IOException {
status = sc;
reason = msg;
}
@Override
public void sendError(int sc) throws IOException {
status = sc;
}
@Override
public void sendRedirect(String location) throws IOException {
setHeader("Location", location);
setStatus(HttpServletResponse.SC_MOVED_TEMPORARILY);
}
@Override
public void setDateHeader(String name, long date) {
String value = formatDate(date);
setHeader(name, value);
}
private String formatDate(long date) {
SimpleDateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
return dateFormat.format(new Date(date));
}
@Override
public void addDateHeader(String name, long date) {
String value = formatDate(date);
addHeader(name, value);
}
@Override
public void setHeader(String name, String value) {
headers.replaceValues(name, Collections.singletonList(value));
}
@Override
public void addHeader(String name, String value) {
headers.put(name, value);
}
@Override
public void setIntHeader(String name, int value) {
setHeader(name, String.valueOf(value));
}
@Override
public void addIntHeader(String name, int value) {
addHeader(name, String.valueOf(value));
}
@Override
public void setStatus(int sc) {
status = sc;
}
@Override
public void setStatus(int sc, String sm) {
setStatus(sc);
reason = sm;
}
@Override
public int getStatus() {
return status;
}
@Override
public String getHeader(String name) {
Collection<String> values = headers.get(name);
return values.isEmpty() ? null : values.iterator().next();
}
@Override
public Collection<String> getHeaders(String name) {
return headers.get(name);
}
@Override
public Collection<String> getHeaderNames() {
return headers.keySet();
}
public CaravanHttpResponse getResponse() {
return new CaravanHttpResponseBuilder()
.body(outputStream.toByteArray())
.headers(headers)
.reason(reason)
.status(status)
.build();
}
}
| if the content length is known in advance then resize the ByteArrayOutputSteram accordingly
| http/src/main/java/io/wcm/caravan/io/http/impl/servletclient/HttpServletResponseMapper.java | if the content length is known in advance then resize the ByteArrayOutputSteram accordingly | <ide><path>ttp/src/main/java/io/wcm/caravan/io/http/impl/servletclient/HttpServletResponseMapper.java
<ide> @Override
<ide> public void setContentLength(int len) {
<ide> setIntHeader("Content-Length", len);
<add>
<add> // if the content length is known in advance then resize the ByteArrayOutputSteram accordingly
<add> // (but only if nothing has yet been written to the output stream)
<add> if (len > 0 && outputStream.size() == 0) {
<add> outputStream = new ByteArrayOutputStream(len);
<add> }
<ide> }
<ide>
<ide> @Override |
|
Java | mit | 871f80e7652b3675e40c02bb7b606f303a391dae | 0 | Loscillo/DACv2-temp | package fr.aumgn.dac2.game;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause;
import fr.aumgn.dac2.arena.Arena;
/**
* Listener which provides common implementations of listeners
 * for most game modes.
*/
public class GameListener implements Listener {
private final AbstractGame game;
private final Arena arena;
public GameListener(AbstractGame game) {
this.game = game;
this.arena = game.getArena();
}
@EventHandler(priority = EventPriority.MONITOR)
public void onMove(PlayerMoveEvent event) {
Player player = event.getPlayer();
if (!game.isPlayerTurn(player)) {
return;
}
if (!(arena.isIn(player.getWorld())
&& arena.getPool().contains(player))) {
return;
}
game.onJumpSuccess(player);
}
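    /*
     * A teleport during the current player's turn counts as a failed jump, unless the
     * teleport was caused by a command (for example an admin moving the player).
     */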
public void onTeleport(PlayerTeleportEvent event){
if(game.isPlayerTurn(event.getPlayer()) && !(event.getCause().equals(TeleportCause.COMMAND)))
game.onJumpFail(event.getPlayer());
}
@EventHandler(priority = EventPriority.HIGH)
public void onDamage(EntityDamageEvent event) {
if (event.getCause() != DamageCause.FALL) {
return;
}
if (!(event.getEntity() instanceof Player)) {
return;
}
Player player = (Player) event.getEntity();
if (!game.isPlayerTurn(player)) {
return;
}
if (!(arena.isIn(player.getWorld()) && arena.getSurroundingRegion()
.contains(player))) {
return;
}
game.onJumpFail(player);
event.setCancelled(true);
}
@EventHandler(priority = EventPriority.MONITOR)
public void onRawQuit(PlayerQuitEvent event) {
Player player = event.getPlayer();
if (!game.isPlayerTurn(player)) {
return;
}
game.onQuit(player);
}
}
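/*
 * Bukkit only dispatches events to methods annotated with @EventHandler, and only after
 * the listener has been registered, for example (the plugin variable is illustrative):
 *
 *   Bukkit.getPluginManager().registerEvents(new GameListener(game), plugin);
 */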
| src/main/java/fr/aumgn/dac2/game/GameListener.java | package fr.aumgn.dac2.game;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import fr.aumgn.dac2.arena.Arena;
/**
* Listener which provides common implementations of listeners
 * for most game modes.
*/
public class GameListener implements Listener {
private final AbstractGame game;
private final Arena arena;
public GameListener(AbstractGame game) {
this.game = game;
this.arena = game.getArena();
}
@EventHandler(priority = EventPriority.MONITOR)
public void onMove(PlayerMoveEvent event) {
Player player = event.getPlayer();
if (!game.isPlayerTurn(player)) {
return;
}
if (!(arena.isIn(player.getWorld())
&& arena.getPool().contains(player))) {
return;
}
game.onJumpSuccess(player);
}
@EventHandler(priority = EventPriority.HIGH)
public void onDamage(EntityDamageEvent event) {
if (event.getCause() != DamageCause.FALL) {
return;
}
if (!(event.getEntity() instanceof Player)) {
return;
}
Player player = (Player) event.getEntity();
if (!game.isPlayerTurn(player)) {
return;
}
if (!(arena.isIn(player.getWorld()) && arena.getSurroundingRegion()
.contains(player))) {
return;
}
game.onJumpFail(player);
event.setCancelled(true);
}
@EventHandler(priority = EventPriority.MONITOR)
public void onRawQuit(PlayerQuitEvent event) {
Player player = event.getPlayer();
if (!game.isPlayerTurn(player)) {
return;
}
game.onQuit(player);
}
}
 | Protection against teleportation
 | src/main/java/fr/aumgn/dac2/game/GameListener.java | Protection against teleportation | <ide><path>rc/main/java/fr/aumgn/dac2/game/GameListener.java
<ide> import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
<ide> import org.bukkit.event.player.PlayerMoveEvent;
<ide> import org.bukkit.event.player.PlayerQuitEvent;
<add>import org.bukkit.event.player.PlayerTeleportEvent;
<add>import org.bukkit.event.player.PlayerTeleportEvent.TeleportCause;
<ide>
<ide> import fr.aumgn.dac2.arena.Arena;
<ide>
<ide> game.onJumpSuccess(player);
<ide> }
<ide>
<add> public void onTeleport(PlayerTeleportEvent event){
<add> if(game.isPlayerTurn(event.getPlayer()) && !(event.getCause().equals(TeleportCause.COMMAND)))
<add> game.onJumpFail(event.getPlayer());
<add> }
<add>
<ide> @EventHandler(priority = EventPriority.HIGH)
<ide> public void onDamage(EntityDamageEvent event) {
<ide> if (event.getCause() != DamageCause.FALL) { |
|
JavaScript | mit | 90622a20af55c56d61238ca49e20207c4f83ac50 | 0 | therewasaguy/p5.js-sound,therewasaguy/p5.js-sound,processing/p5.js-sound,processing/p5.js-sound | define(function (require) {
'use strict';
var p5sound = require('master');
require('sndcore');
/**
* A gain node is usefull to set the relative volume of sound.
* It's typically used to build mixers.
*
* @class p5.Gain
* @constructor
* @example
* <div><code>
*
* // load two soundfile and crossfade beetween them
* var sound1,sound2;
* var gain1, gain2, gain3;
*
* function preload(){
* soundFormats('ogg', 'mp3');
* sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01');
* sound2 = loadSound('../_files/beat.mp3');
* }
*
* function setup() {
* createCanvas(400,200);
*
* // create a 'master' gain to which we will connect both soundfiles
* gain3 = new p5.Gain();
* gain3.connect();
*
* // setup first sound for playing
* sound1.rate(1);
* sound1.loop();
* sound1.disconnect(); // diconnect from p5 output
*
* gain1 = new p5.Gain(); // setup a gain node
* gain1.setInput(sound1); // connect the first sound to its input
* gain1.connect(gain3); // connect its output to the 'master'
*
* sound2.rate(1);
* sound2.disconnect();
* sound2.loop();
*
* gain2 = new p5.Gain();
* gain2.setInput(sound2);
* gain2.connect(gain3);
*
* }
*
* function draw(){
* background(180);
*
* // calculate the horizontal distance beetween the mouse and the right of the screen
* var d = dist(mouseX,0,width,0);
*
* // map the horizontal position of the mouse to values useable for volume control of sound1
* var vol1 = map(mouseX,0,width,0,1);
* var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
*
* gain1.amp(vol1,0.5,0);
* gain2.amp(vol2,0.5,0);
*
* // map the vertical position of the mouse to values useable for 'master volume control'
* var vol3 = map(mouseY,0,height,0,1);
* gain3.amp(vol3,0.5,0);
* }
*</code></div>
*
*/
p5.Gain = function() {
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
// otherwise, Safari distorts
this.input.gain.value = 0.5;
this.input.connect(this.output);
}
/**
* Connect a source to the gain node.
*
* @method setInput
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
*/
p5.Gain.prototype.setInput = function(src) {
src.connect(this.input);
}
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Gain.prototype.connect = function(unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u.input ? u.input : u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Gain.prototype.disconnect = function() {
this.output.disconnect();
};
/**
* Set the output level of the gain node.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Gain.prototype.amp = function(vol, rampTime, tFromNow) {
var rampTime = rampTime || 0;
var tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
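    // Hold the current level until tFromNow, then ramp linearly to the target over
    // rampTime. cancelScheduledValues() first clears any ramps that are still pending
    // so the two linearRampToValueAtTime() calls below start from a clean schedule.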
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
};
});
| src/gain.js | define(function (require) {
'use strict';
var p5sound = require('master');
require('sndcore');
/**
* A gain node is usefull to set the relative volume of sound.
* It's typically used to build mixers.
*
* @class p5.Gain
* @constructor
* @example
* <div><code>
*
* // load two soundfile and crossfade beetween them
* var sound1,sound2;
* var gain1, gain2, gain3;
*
* function preload(){
* soundFormats('ogg', 'mp3');
* sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01');
* sound2 = loadSound('../_files/beat.mp3');
* }
*
* function setup() {
* createCanvas(400,200);
*
* // create a 'master' gain to which we will connect both soundfiles
* gain3 = new p5.Gain();
* gain3.connect();
*
* // setup first sound for playing
* sound1.rate(1);
* sound1.loop();
* sound1.disconnect(); // diconnect from p5 output
*
* gain1 = new p5.Gain(); // setup a gain node
* gain1.setInput(sound1); // connect the first sound to its input
* gain1.connect(gain3); // connect its output to the 'master'
*
* sound2.rate(1);
* sound2.disconnect();
* sound2.loop();
*
* gain2 = new p5.Gain();
* gain2.setInput(sound2);
* gain2.connect(gain3);
*
* }
*
* function draw(){
* background(180);
*
* // calculate the horizontal distance beetween the mouse and the right of the screen
* var d = dist(mouseX,0,width,0);
*
* // map the horizontal position of the mouse to values useable for volume control of sound1
* var vol1 = map(mouseX,0,width,0,1);
* var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
*
* gain1.amp(vol1,0.5,0);
* gain2.amp(vol2,0.5,0);
*
* // map the vertical position of the mouse to values useable for 'master volume control'
* var vol3 = map(mouseY,0,height,0,1);
* gain3.amp(vol3,0.5,0);
* }
*</code></div>
*
*/
p5.Gain = function() {
this.ac = p5sound.audiocontext;
this.input = this.ac.createGain();
this.output = this.ac.createGain();
// otherwise, Safari distorts
this.input.gain.value = 0.5;
this.input.connect(this.output);
}
/**
* Connect a source to the gain node.
*
* @method setInput
* @param {Object} src p5.sound / Web Audio object with a sound
* output.
*/
p5.Gain.prototype.setInput = function(src) {
src.connect(this.input);
}
/**
* Send output to a p5.sound or web audio object
*
* @method connect
* @param {Object} unit
*/
p5.Gain.prototype.connect = function(unit) {
var u = unit || p5.soundOut.input;
this.output.connect(u.input ? u.input : u);
};
/**
* Disconnect all output.
*
* @method disconnect
*/
p5.Gain.prototype.disconnect = function() {
this.output.disconnect();
};
/**
* Set the output level of the gain node.
*
* @method amp
* @param {Number} volume amplitude between 0 and 1.0
* @param {Number} [rampTime] create a fade that lasts rampTime
* @param {Number} [timeFromNow] schedule this event to happen
* seconds from now
*/
p5.Gain.prototype.amp = function(vol, rampTime, tFromNow) {
var rampTime = rampTime || 0;
var tFromNow = tFromNow || 0;
var now = p5sound.audiocontext.currentTime;
var currentVol = this.output.gain.value;
this.output.gain.cancelScheduledValues(now);
this.output.gain.linearRampToValueAtTime(currentVol, now + tFromNow);
this.output.gain.linearRampToValueAtTime(vol, now + tFromNow + rampTime);
};
});
| convert tabs to spaces
| src/gain.js | convert tabs to spaces | <ide><path>rc/gain.js
<ide>
<ide> /**
<ide> * A gain node is usefull to set the relative volume of sound.
<del> * It's typically used to build mixers.
<add> * It's typically used to build mixers.
<ide> *
<ide> * @class p5.Gain
<ide> * @constructor
<ide> * @example
<ide> * <div><code>
<del> *
<del> * // load two soundfile and crossfade beetween them
<del> * var sound1,sound2;
<del> * var gain1, gain2, gain3;
<del> *
<del> * function preload(){
<del> * soundFormats('ogg', 'mp3');
<del> * sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01');
<del> * sound2 = loadSound('../_files/beat.mp3');
<del> * }
<del> *
<del> * function setup() {
<del> * createCanvas(400,200);
<del> *
<del> * // create a 'master' gain to which we will connect both soundfiles
<del> * gain3 = new p5.Gain();
<del> * gain3.connect();
<del> *
<del> * // setup first sound for playing
<del> * sound1.rate(1);
<del> * sound1.loop();
<del> * sound1.disconnect(); // diconnect from p5 output
<del> *
<del> * gain1 = new p5.Gain(); // setup a gain node
<del> * gain1.setInput(sound1); // connect the first sound to its input
<del> * gain1.connect(gain3); // connect its output to the 'master'
<del> *
<del> * sound2.rate(1);
<del> * sound2.disconnect();
<del> * sound2.loop();
<del> *
<del> * gain2 = new p5.Gain();
<del> * gain2.setInput(sound2);
<del> * gain2.connect(gain3);
<del> *
<del> * }
<del> *
<del> * function draw(){
<del> * background(180);
<del> *
<del> * // calculate the horizontal distance beetween the mouse and the right of the screen
<del> * var d = dist(mouseX,0,width,0);
<del> *
<del> * // map the horizontal position of the mouse to values useable for volume control of sound1
<del> * var vol1 = map(mouseX,0,width,0,1);
<del> * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
<del> *
<del> * gain1.amp(vol1,0.5,0);
<del> * gain2.amp(vol2,0.5,0);
<del> *
<del> * // map the vertical position of the mouse to values useable for 'master volume control'
<del> * var vol3 = map(mouseY,0,height,0,1);
<del> * gain3.amp(vol3,0.5,0);
<del> * }
<del> *</code></div>
<del> *
<del> */
<add> *
<add> * // load two soundfile and crossfade beetween them
<add> * var sound1,sound2;
<add> * var gain1, gain2, gain3;
<add> *
<add> * function preload(){
<add> * soundFormats('ogg', 'mp3');
<add> * sound1 = loadSound('../_files/Damscray_-_Dancing_Tiger_01');
<add> * sound2 = loadSound('../_files/beat.mp3');
<add> * }
<add> *
<add> * function setup() {
<add> * createCanvas(400,200);
<add> *
<add> * // create a 'master' gain to which we will connect both soundfiles
<add> * gain3 = new p5.Gain();
<add> * gain3.connect();
<add> *
<add> * // setup first sound for playing
<add> * sound1.rate(1);
<add> * sound1.loop();
<add> * sound1.disconnect(); // diconnect from p5 output
<add> *
<add> * gain1 = new p5.Gain(); // setup a gain node
<add> * gain1.setInput(sound1); // connect the first sound to its input
<add> * gain1.connect(gain3); // connect its output to the 'master'
<add> *
<add> * sound2.rate(1);
<add> * sound2.disconnect();
<add> * sound2.loop();
<add> *
<add> * gain2 = new p5.Gain();
<add> * gain2.setInput(sound2);
<add> * gain2.connect(gain3);
<add> *
<add> * }
<add> *
<add> * function draw(){
<add> * background(180);
<add> *
<add> * // calculate the horizontal distance beetween the mouse and the right of the screen
<add> * var d = dist(mouseX,0,width,0);
<add> *
<add> * // map the horizontal position of the mouse to values useable for volume control of sound1
<add> * var vol1 = map(mouseX,0,width,0,1);
<add> * var vol2 = 1-vol1; // when sound1 is loud, sound2 is quiet and vice versa
<add> *
<add> * gain1.amp(vol1,0.5,0);
<add> * gain2.amp(vol2,0.5,0);
<add> *
<add> * // map the vertical position of the mouse to values useable for 'master volume control'
<add> * var vol3 = map(mouseY,0,height,0,1);
<add> * gain3.amp(vol3,0.5,0);
<add> * }
<add> *</code></div>
<add> *
<add> */
<ide>
<ide> p5.Gain = function() {
<ide> this.ac = p5sound.audiocontext; |
|
Java | apache-2.0 | 764e3634229b4cb50868c14d8f1f486a6ca94545 | 0 | webanno/webanno,webanno/webanno,webanno/webanno,webanno/webanno | /*
* Copyright 2015
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.brat.annotation.component;
import static de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeUtil.getAdapter;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getAddr;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getFeature;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getNextSentenceAddress;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.findWindowStartCenteringOnSelection;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getSentenceNumber;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.isSame;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.selectAt;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.selectByAddr;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.setFeature;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.NoResultException;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.uima.UIMAException;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASRuntimeException;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxEventBehavior;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxCallListener;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.attributes.IAjaxCallListener;
import org.apache.wicket.ajax.attributes.ThrottlingSettings;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.form.AjaxFormValidatingBehavior;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.markup.head.PriorityHeaderItem;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.AbstractTextComponent;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.RefreshingView;
import org.apache.wicket.markup.repeater.util.ModelIteratorAdapter;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.request.Request;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.apache.wicket.util.time.Duration;
import org.codehaus.plexus.util.StringUtils;
import com.googlecode.wicket.jquery.core.Options;
import com.googlecode.wicket.jquery.core.template.IJQueryTemplate;
import com.googlecode.wicket.jquery.ui.widget.tooltip.TooltipBehavior;
import com.googlecode.wicket.kendo.ui.form.NumberTextField;
import com.googlecode.wicket.kendo.ui.form.TextField;
import com.googlecode.wicket.kendo.ui.form.combobox.ComboBox;
import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.ArcAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.ChainAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.SpanAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeUtil;
import de.tudarmstadt.ukp.clarin.webanno.brat.annotation.action.ActionContext;
import de.tudarmstadt.ukp.clarin.webanno.brat.annotation.action.Selection;
import de.tudarmstadt.ukp.clarin.webanno.brat.exception.BratAnnotationException;
import de.tudarmstadt.ukp.clarin.webanno.brat.message.SpanAnnotationResponse;
import de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil;
import de.tudarmstadt.ukp.clarin.webanno.brat.render.model.VID;
import de.tudarmstadt.ukp.clarin.webanno.brat.util.JavascriptUtils;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.Evaluator;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.PossibleValue;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.RulesIndicator;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.ValuesGenerator;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationFeature;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer;
import de.tudarmstadt.ukp.clarin.webanno.model.Mode;
import de.tudarmstadt.ukp.clarin.webanno.model.MultiValueMode;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.Tag;
import de.tudarmstadt.ukp.clarin.webanno.model.TagSet;
import de.tudarmstadt.ukp.clarin.webanno.support.DefaultFocusBehavior;
import de.tudarmstadt.ukp.clarin.webanno.support.DefaultFocusBehavior2;
import de.tudarmstadt.ukp.clarin.webanno.support.DescriptionTooltipBehavior;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
import de.tudarmstadt.ukp.dkpro.core.api.syntax.type.dependency.Dependency;
/**
* Annotation Detail Editor Panel.
*
*/
public class AnnotationDetailEditorPanel
extends Panel
{
private static final long serialVersionUID = 7324241992353693848L;
private static final Log LOG = LogFactory.getLog(AnnotationDetailEditorPanel.class);
@SpringBean(name = "documentRepository")
private RepositoryService repository;
@SpringBean(name = "annotationService")
private AnnotationService annotationService;
private AnnotationFeatureForm annotationFeatureForm;
private Label selectedTextLabel;
private CheckBox forwardAnnotationCheck;
private RefreshingView<FeatureModel> featureValues;
private AjaxButton deleteButton;
private AjaxButton reverseButton;
private LayerSelector layerSelector;
private TextField<String> forwardAnnotationText;
private Label selectedAnnotationLayer;
private ModalWindow deleteModal;
private List<AnnotationLayer> annotationLayers = new ArrayList<AnnotationLayer>();
private List<FeatureModel> featureModels;
private ActionContext bModel;
private String selectedTag = "";
/**
* Function to return tooltip using jquery
* Docs for the JQuery tooltip widget that we configure below:
* https://api.jqueryui.com/tooltip/
*/
private final String functionForTooltip = "function() { return "
+ "'<div class=\"tooltip-title\">'+($(this).text() "
+ "? $(this).text() : 'no title')+'</div>"
+ "<div class=\"tooltip-content tooltip-pre\">'+($(this).attr('title') "
+ "? $(this).attr('title') : 'no description' )+'</div>' }";
public AnnotationDetailEditorPanel(String id, IModel<ActionContext> aModel)
{
super(id, aModel);
bModel = aModel.getObject();
featureModels = new ArrayList<>();
annotationFeatureForm = new AnnotationFeatureForm("annotationFeatureForm",
aModel.getObject())
{
private static final long serialVersionUID = 8081614428845920047L;
@Override
protected void onConfigure()
{
super.onConfigure();
                // Disable the whole form while no document is open or once the document is finished
setEnabled(bModel.getDocument() != null && !isAnnotationFinished());
}
};
annotationFeatureForm.setOutputMarkupId(true);
annotationFeatureForm.add(new AjaxFormValidatingBehavior(annotationFeatureForm, "submit") {
private static final long serialVersionUID = -5642108496844056023L;
@Override
protected void onSubmit(AjaxRequestTarget aTarget) {
try {
actionAnnotate(aTarget, bModel, false);
} catch (UIMAException | ClassNotFoundException | IOException | BratAnnotationException e) {
error(e.getMessage());
}
}
});
add(annotationFeatureForm);
}
public boolean isAnnotationFinished()
{
if (bModel.getMode().equals(Mode.CURATION)) {
return bModel.getDocument().getState().equals(SourceDocumentState.CURATION_FINISHED);
}
else {
return repository.getAnnotationDocument(bModel.getDocument(), bModel.getUser())
.getState().equals(AnnotationDocumentState.FINISHED);
}
}
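    // Note (summary of the method above): in curation mode "finished" means the source document
    // has reached CURATION_FINISHED, while in all other modes it means the user's own annotation
    // document is in state FINISHED.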
private class AnnotationFeatureForm
extends Form<ActionContext>
{
private static final long serialVersionUID = 3635145598405490893L;
private WebMarkupContainer featureEditorsContainer;
public AnnotationFeatureForm(String id, ActionContext aBModel)
{
super(id, new CompoundPropertyModel<ActionContext>(aBModel));
add(forwardAnnotationCheck = new CheckBox("forwardAnnotation")
{
private static final long serialVersionUID = 8908304272310098353L;
@Override
protected void onConfigure()
{
super.onConfigure();
setEnabled(isForwardable());
updateForwardAnnotation(bModel);
}
});
forwardAnnotationCheck.add(new AjaxFormComponentUpdatingBehavior("change")
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
updateForwardAnnotation(getModelObject());
if(bModel.isForwardAnnotation()){
aTarget.appendJavaScript(JavascriptUtils.getFocusScript(forwardAnnotationText));
selectedTag = "";
}
}
});
forwardAnnotationCheck.setOutputMarkupId(true);
add(new Label("noAnnotationWarning", "No annotation selected!"){
private static final long serialVersionUID = -6046409838139863541L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(!bModel.getSelection().getAnnotation().isSet());
}
});
add(deleteButton = new AjaxButton("delete")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
// Avoid deleting in read-only layers
setEnabled(bModel.getSelectedAnnotationLayer() != null
&& !bModel.getSelectedAnnotationLayer().isReadonly());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
try {
JCas jCas = getCas(bModel);
AnnotationFS fs = selectByAddr(jCas, bModel.getSelection().getAnnotation().getId());
AnnotationLayer layer = bModel.getSelectedAnnotationLayer();
TypeAdapter adapter = getAdapter(annotationService, layer);
if (adapter instanceof SpanAdapter && getAttachedRels(jCas, fs, layer).size() > 0) {
                        deleteModal.setTitle("Are you sure you want to delete all relations attached to this span annotation?");
deleteModal.setContent(new DeleteOrReplaceAnnotationModalPanel(
deleteModal.getContentId(), bModel, deleteModal,
AnnotationDetailEditorPanel.this,
bModel.getSelectedAnnotationLayer(), false));
deleteModal.show(aTarget);
}
else {
actionDelete(aTarget, bModel);
}
}
catch (UIMAException | ClassNotFoundException | IOException
| CASRuntimeException | BratAnnotationException e) {
error(e.getMessage());
}
}
});
add(reverseButton = new AjaxButton("reverse")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().isRelationAnno()
&& bModel.getSelection().getAnnotation().isSet()
&& bModel.getSelectedAnnotationLayer().getType()
.equals(WebAnnoConst.RELATION_TYPE));
// Avoid reversing in read-only layers
setEnabled(bModel.getSelectedAnnotationLayer() != null
&& !bModel.getSelectedAnnotationLayer().isReadonly());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
aTarget.addChildren(getPage(), FeedbackPanel.class);
try {
actionReverse(aTarget, bModel);
}
catch (BratAnnotationException e) {
aTarget.prependJavaScript("alert('" + e.getMessage() + "')");
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(e.getMessage(), e);
}
}
});
reverseButton.setOutputMarkupPlaceholderTag(true);
add(new AjaxButton("clear")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
aTarget.addChildren(getPage(), FeedbackPanel.class);
try {
actionClear(aTarget, bModel);
}
catch (UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(e.getMessage(), e);
}
}
});
add(layerSelector = new LayerSelector("defaultAnnotationLayer", annotationLayers));
featureEditorsContainer = new WebMarkupContainer("featureEditorsContainer")
{
private static final long serialVersionUID = 8908304272310098353L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
}
};
// Add placeholder since wmc might start out invisible. Without the placeholder we
// cannot make it visible in an AJAX call
featureEditorsContainer.setOutputMarkupPlaceholderTag(true);
featureEditorsContainer.setOutputMarkupId(true);
featureEditorsContainer.add(new Label("noFeaturesWarning", "No features available!") {
private static final long serialVersionUID = 4398704672665066763L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(featureModels.isEmpty());
}
});
featureValues = new FeatureEditorPanelContent("featureValues");
featureEditorsContainer.add(featureValues);
forwardAnnotationText = new TextField<String>("forwardAnno");
forwardAnnotationText.setOutputMarkupId(true);
forwardAnnotationText.add(new AjaxFormComponentUpdatingBehavior("keyup") {
private static final long serialVersionUID = 4554834769861958396L;
@Override
protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
super.updateAjaxAttributes(attributes);
IAjaxCallListener listener = new AjaxCallListener(){
private static final long serialVersionUID = -7968540662654079601L;
@Override
public CharSequence getPrecondition(Component component) {
return "var keycode = Wicket.Event.keyCode(attrs.event);" +
" return true;" ;
}
};
attributes.getAjaxCallListeners().add(listener);
attributes.getDynamicExtraParameters()
.add("var eventKeycode = Wicket.Event.keyCode(attrs.event);" +
"return {keycode: eventKeycode};");
attributes.setAllowDefault(true);
}
@Override
protected void onUpdate(AjaxRequestTarget aTarget) {
final Request request = RequestCycle.get().getRequest();
final String jsKeycode = request.getRequestParameters()
.getParameterValue("keycode").toString("");
if (jsKeycode.equals("32")){
try {
actionAnnotate(aTarget, aBModel, false);
selectedTag ="";
} catch (UIMAException | ClassNotFoundException | IOException | BratAnnotationException e) {
error(e);
}
return;
}
if (jsKeycode.equals("13")){
selectedTag ="";
return;
}
selectedTag = (forwardAnnotationText.getModelObject() == null ? ""
: forwardAnnotationText.getModelObject().charAt(0)) + selectedTag;
Map<String, String> bindTags = getBindTags();
if (!bindTags.isEmpty()) {
featureModels.get(0).value = getKeyBindValue(selectedTag, bindTags);
}
aTarget.add(forwardAnnotationText);
aTarget.add(featureValues.get(0));
}
});
forwardAnnotationText.setOutputMarkupId(true);
forwardAnnotationText.add(new AttributeAppender("style", "opacity:0", ";"));
// forwardAnno.add(new AttributeAppender("style", "filter:alpha(opacity=0)", ";"));
add(forwardAnnotationText);
// the selected text for annotation
selectedTextLabel = new Label("selectedText", PropertyModel.of(getModelObject(),
"selection.text"));
selectedTextLabel.setOutputMarkupId(true);
featureEditorsContainer.add(selectedTextLabel);
featureEditorsContainer.add(new Label("layerName","Layer"){
private static final long serialVersionUID = 6084341323607243784L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getPreferences().isRememberLayer());
}
});
featureEditorsContainer.setOutputMarkupId(true);
// the annotation layer for the selected annotation
selectedAnnotationLayer = new Label("selectedAnnotationLayer", new Model<String>())
{
private static final long serialVersionUID = 4059460390544343324L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getPreferences().isRememberLayer());
}
};
selectedAnnotationLayer.setOutputMarkupId(true);
featureEditorsContainer.add(selectedAnnotationLayer);
add(featureEditorsContainer);
add(deleteModal = new ModalWindow("yesNoModal"));
deleteModal.setOutputMarkupId(true);
deleteModal.setInitialWidth(600);
deleteModal.setInitialHeight(50);
deleteModal.setResizable(true);
deleteModal.setWidthUnit("px");
deleteModal.setHeightUnit("px");
deleteModal.setTitle("Are you sure you want to delete the existing annotation?");
}
}
public void actionAnnotate(AjaxRequestTarget aTarget, ActionContext aBModel, boolean aIsForwarded)
throws UIMAException, ClassNotFoundException, IOException, BratAnnotationException
{
if (isAnnotationFinished()) {
throw new BratAnnotationException(
"This document is already closed. Please ask your project manager to re-open it via the Monitoring page");
}
// If there is no annotation yet, create one. During creation, the adapter
// may notice that it would create a duplicate and return the address of
// an existing annotation instead of a new one.
JCas jCas = getCas(aBModel);
actionAnnotate(aTarget, aBModel, jCas, aIsForwarded);
}
public void actionAnnotate(AjaxRequestTarget aTarget, ActionContext aBModel, JCas jCas, boolean aIsForwarded)
throws UIMAException, ClassNotFoundException, IOException, BratAnnotationException
{
if (aBModel.getSelectedAnnotationLayer() == null) {
error("No layer is selected. First select a layer.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
if (aBModel.getSelectedAnnotationLayer().isReadonly()) {
error("Layer is not editable.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
// Verify if input is valid according to tagset
for (int i = 0; i < featureModels.size(); i++) {
AnnotationFeature feature = featureModels.get(i).feature;
if (CAS.TYPE_NAME_STRING.equals(feature.getType())) {
String value = (String) featureModels.get(i).value;
// Check if tag is necessary, set, and correct
if (
value != null &&
feature.getTagset() != null &&
!feature.getTagset().isCreateTag() &&
!annotationService.existsTag(value, feature.getTagset())
) {
error("[" + value
+ "] is not in the tag list. Please choose from the existing tags");
return;
}
}
}
// #186 - After filling a slot, the annotation detail panel is not updated
aTarget.add(annotationFeatureForm);
TypeAdapter adapter = getAdapter(annotationService, aBModel.getSelectedAnnotationLayer());
Selection selection = aBModel.getSelection();
if (selection.getAnnotation().isNotSet()) {
if (bModel.getSelection().isRelationAnno()) {
AnnotationFS originFs = selectByAddr(jCas, selection.getOrigin());
AnnotationFS targetFs = selectByAddr(jCas, selection.getTarget());
if (adapter instanceof SpanAdapter) {
error("Layer do not support arc annotation.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
if (adapter instanceof ArcAdapter) {
AnnotationFS arc = ((ArcAdapter) adapter).add(originFs, targetFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), null, null);
selection.setAnnotation(new VID(getAddr(arc)));
if (selection.getAnnotation().isSet()) {
selection.setText("[" + originFs.getCoveredText() + "] - [" +
targetFs.getCoveredText() + "]");
}
else {
selection.setText("");
}
} else {
selection.setAnnotation(
new VID(((ChainAdapter) adapter).addArc(jCas, originFs, targetFs, null, null)));
if (selection.getAnnotation().isSet()) {
selection.setText(originFs.getCoveredText());
}
else {
selection.setText("");
}
}
selection.setBegin(originFs.getBegin());
} else if (adapter instanceof SpanAdapter) {
for (FeatureModel fm : featureModels) {
Serializable spanValue = ((SpanAdapter) adapter).getSpan(jCas, selection.getBegin(),
selection.getEnd(), fm.feature, null);
if (spanValue != null) {
// allow modification for forward annotation
if (aBModel.isForwardAnnotation()) {
fm.value = spanValue;
featureModels.get(0).value = spanValue;
selectedTag =
getBindTags().entrySet().stream().filter(e -> e.getValue().equals(spanValue))
.map(Map.Entry::getKey).findFirst().orElse(null);
} else {
actionClear(aTarget, bModel);
throw new BratAnnotationException("Cannot create another annotation of layer ["
+ bModel.getSelectedAnnotationLayer().getUiName() + "] at this"
+ " location - stacking is not enabled for this layer.");
}
}
}
Integer annoId = ((SpanAdapter) adapter).add(jCas, selection.getBegin(), selection.getEnd(), null, null);
selection.setAnnotation(new VID(annoId));
AnnotationFS annoFs = BratAjaxCasUtil.selectByAddr(jCas, annoId);
selection.set(jCas, annoFs.getBegin(), annoFs.getEnd());
} else {
for (FeatureModel fm : featureModels) {
Serializable spanValue = ((ChainAdapter) adapter).getSpan(jCas, selection.getBegin(),
selection.getEnd(), fm.feature, null);
if (spanValue != null) {
// allow modification for forward annotation
if (aBModel.isForwardAnnotation()) {
fm.value = spanValue;
featureModels.get(0).value = spanValue;
selectedTag =
getBindTags().entrySet().stream().filter(e -> e.getValue().equals(spanValue))
.map(Map.Entry::getKey).findFirst().orElse(null);
}
}
}
selection.setAnnotation(new VID(((ChainAdapter) adapter).addSpan(
jCas, selection.getBegin(), selection.getEnd(), null, null)));
selection.setText(jCas.getDocumentText().substring(
selection.getBegin(), selection.getEnd()));
}
}
// Set feature values
List<AnnotationFeature> features = new ArrayList<AnnotationFeature>();
for (FeatureModel fm : featureModels) {
features.add(fm.feature);
// For string features with extensible tagsets, extend the tagset
if (CAS.TYPE_NAME_STRING.equals(fm.feature.getType())) {
String value = (String) fm.value;
if (
value != null &&
fm.feature.getTagset() != null &&
fm.feature.getTagset().isCreateTag() &&
!annotationService.existsTag(value, fm.feature.getTagset())
) {
                    // use a local name that does not shadow the panel's "selectedTag" field
                    Tag newTag = new Tag();
                    newTag.setName(value);
                    newTag.setTagSet(fm.feature.getTagset());
                    annotationService.createTag(newTag, aBModel.getUser());
}
}
adapter.updateFeature(jCas, fm.feature, aBModel.getSelection().getAnnotation().getId(),
fm.value);
}
// Update progress information
int sentenceNumber = getSentenceNumber(jCas, aBModel.getSelection().getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
// persist changes
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
if (bModel.getSelection().isRelationAnno()) {
aBModel.setRememberedArcLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedArcFeatures(featureModels);
}
else {
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedSpanFeatures(featureModels);
}
aBModel.getSelection().setAnnotate(true);
if (aBModel.getSelection().getAnnotation().isSet()) {
String bratLabelText = TypeUtil.getBratLabelText(adapter,
selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId()), features);
info(generateMessage(aBModel.getSelectedAnnotationLayer(), bratLabelText, false));
}
onAnnotate(aTarget, aBModel);
if (aBModel.isForwardAnnotation() && !aIsForwarded && featureModels.get(0).value != null) {
if (aBModel.getSelection().getEnd() >= aBModel.getFirstVisibleSentenceEnd()) {
autoScroll(jCas, aBModel, true);
}
onAutoForward(aTarget, aBModel);
} else if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
forwardAnnotationText.setModelObject(null);
onChange(aTarget, aBModel);
if (aBModel.isForwardAnnotation() && featureModels.get(0).value != null) {
aTarget.add(annotationFeatureForm);
}
}
public void actionDelete(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, CASRuntimeException,
BratAnnotationException
{
JCas jCas = getCas(aBModel);
AnnotationFS fs = selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId());
// TODO We assume here that the selected annotation layer corresponds to the type of the
// FS to be deleted. It would be more robust if we could get the layer from the FS itself.
AnnotationLayer layer = aBModel.getSelectedAnnotationLayer();
TypeAdapter adapter = getAdapter(annotationService, layer);
// == DELETE ATTACHED RELATIONS ==
// If the deleted FS is a span, we must delete all relations that
// point to it directly or indirectly via the attachFeature.
//
// NOTE: It is important that this happens before UNATTACH SPANS since the attach feature
// is no longer set after UNATTACH SPANS!
if (adapter instanceof SpanAdapter) {
for (AnnotationFS attachedFs : getAttachedRels(jCas, fs, layer)) {
jCas.getCas().removeFsFromIndexes(attachedFs);
info("The attached annotation for relation type [" + annotationService
.getLayer(attachedFs.getType().getName(), bModel.getProject()).getUiName()
+ "] is deleted");
}
}
// == DELETE ATTACHED SPANS ==
        // This case is currently not implemented because WebAnno does not currently allow
        // creating spans that attach to other spans. The only span type for which this is relevant
// is the Token type which cannot be deleted.
// == UNATTACH SPANS ==
// If the deleted FS is a span that is attached to another span, the
// attachFeature in the other span must be set to null. Typical example: POS is deleted, so
// the pos feature of Token must be set to null. This is a quick case, because we only need
// to look at span annotations that have the same offsets as the FS to be deleted.
if (adapter instanceof SpanAdapter && layer.getAttachType() != null) {
Type spanType = CasUtil.getType(jCas.getCas(), layer.getAttachType().getName());
Feature attachFeature = spanType.getFeatureByBaseName(layer.getAttachFeature()
.getName());
for (AnnotationFS attachedFs : selectAt(jCas.getCas(), spanType, fs.getBegin(),
fs.getEnd())) {
if (isSame(attachedFs.getFeatureValue(attachFeature), fs)) {
attachedFs.setFeatureValue(attachFeature, null);
LOG.debug("Unattached [" + attachFeature.getShortName() + "] on annotation ["
+ getAddr(attachedFs) + "]");
}
}
}
// == CLEAN UP LINK FEATURES ==
// If the deleted FS is a span that is the target of a link feature, we must unset that
// link and delete the slot if it is a multi-valued link. Here, we have to scan all
// annotations from layers that have link features that could point to the FS
// to be deleted: the link feature must be the type of the FS or it must be generic.
if (adapter instanceof SpanAdapter) {
for (AnnotationFeature linkFeature : annotationService.listAttachedLinkFeatures(layer)) {
Type linkType = CasUtil.getType(jCas.getCas(), linkFeature.getLayer().getName());
for (AnnotationFS linkFS : CasUtil.select(jCas.getCas(), linkType)) {
List<LinkWithRoleModel> links = getFeature(linkFS, linkFeature);
Iterator<LinkWithRoleModel> i = links.iterator();
boolean modified = false;
while (i.hasNext()) {
LinkWithRoleModel link = i.next();
if (link.targetAddr == getAddr(fs)) {
i.remove();
LOG.debug("Cleared slot [" + link.role + "] in feature ["
+ linkFeature.getName() + "] on annotation [" + getAddr(linkFS)
+ "]");
modified = true;
}
}
if (modified) {
setFeature(linkFS, linkFeature, links);
}
}
}
}
// If the deleted FS is a relation, we don't have to do anything. Nothing can point to a
// relation.
if (adapter instanceof ArcAdapter) {
// Do nothing ;)
}
// Actually delete annotation
adapter.delete(jCas, aBModel.getSelection().getAnnotation());
// Store CAS again
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
// Update progress information
int sentenceNumber = getSentenceNumber(jCas, aBModel.getSelection().getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
// Auto-scroll
if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.getSelection().setAnnotate(false);
info(generateMessage(aBModel.getSelectedAnnotationLayer(), null, true));
// A hack to remember the visual DropDown display value
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedSpanFeatures(featureModels);
aBModel.getSelection().clear();
        // after the delete, the next user action is treated as an annotation again
bModel.getSelection().setAnnotate(true);
aTarget.add(annotationFeatureForm);
aTarget.add(deleteButton);
aTarget.add(reverseButton);
onChange(aTarget, aBModel);
onDelete(aTarget, aBModel, fs);
}
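    // Note on the deletion cascade in actionDelete() above (illustrative summary): attached
    // relations are removed first, then attach-features (e.g. the pos feature of Token) are
    // unset, then link slots pointing at the deleted span are cleared, and only then is the
    // annotation itself removed. A hypothetical caller, such as a delete button handler,
    // therefore needs only a single call:
    //
    //   editorPanel.actionDelete(ajaxTarget, actionContext);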
private void actionReverse(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, BratAnnotationException
{
JCas jCas;
jCas = getCas(aBModel);
AnnotationFS idFs = selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId());
jCas.removeFsFromIndexes(idFs);
AnnotationFS originFs = selectByAddr(jCas, aBModel.getSelection().getOrigin());
AnnotationFS targetFs = selectByAddr(jCas, aBModel.getSelection().getTarget());
TypeAdapter adapter = getAdapter(annotationService, aBModel.getSelectedAnnotationLayer());
if (adapter instanceof ArcAdapter) {
if(featureModels.size()==0){
//If no features, still create arc #256
AnnotationFS arc = ((ArcAdapter) adapter).add(targetFs, originFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), null, null);
aBModel.getSelection().setAnnotation(new VID(getAddr(arc)));
}
else{
for (FeatureModel fm : featureModels) {
AnnotationFS arc = ((ArcAdapter) adapter).add(targetFs, originFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), fm.feature,
fm.value);
aBModel.getSelection().setAnnotation(new VID(getAddr(arc)));
}
}
}
else {
error("chains cannot be reversed");
return;
}
// persist changes
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
int sentenceNumber = getSentenceNumber(jCas, originFs.getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
info("The arc has been reversed");
aBModel.setRememberedArcLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedArcFeatures(featureModels);
        // in case the user re-reverses it
int temp = aBModel.getSelection().getOrigin();
aBModel.getSelection().setOrigin(aBModel.getSelection().getTarget());
aBModel.getSelection().setTarget(temp);
onChange(aTarget, aBModel);
}
public void actionClear(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, BratAnnotationException
{
reset(aTarget);
aTarget.add(annotationFeatureForm);
onChange(aTarget, aBModel);
}
public JCas getCas(ActionContext aBModel)
throws UIMAException, IOException, ClassNotFoundException
{
if (aBModel.getMode().equals(Mode.ANNOTATION) || aBModel.getMode().equals(Mode.AUTOMATION)
|| aBModel.getMode().equals(Mode.CORRECTION)
|| aBModel.getMode().equals(Mode.CORRECTION_MERGE)) {
return repository.readAnnotationCas(aBModel.getDocument(), aBModel.getUser());
}
else {
return repository.readCurationCas(aBModel.getDocument());
}
}
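    // Note: getCas() hides the mode distinction from the action methods above - annotation,
    // automation and correction modes operate on the per-user annotation CAS, while curation
    // operates on the shared curation CAS of the document.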
/**
* Scroll the window of visible annotations.
*
* @param aForward
     *            if true, instead of centering on the sentence that was last edited, just scroll down
* one sentence. This is for forward-annotation mode.
*/
private void autoScroll(JCas jCas, ActionContext aBModel, boolean aForward)
{
if (aForward) {
// Fetch the first sentence on screen
Sentence sentence = selectByAddr(jCas, Sentence.class,
aBModel.getFirstVisibleSentenceAddress());
// Find the following one
int address = getNextSentenceAddress(jCas, sentence);
// Move to it
aBModel.setFirstVisibleSentence(selectByAddr(jCas, Sentence.class, address));
}
else {
// Fetch the first sentence on screen
Sentence sentence = selectByAddr(jCas, Sentence.class,
aBModel.getFirstVisibleSentenceAddress());
// Calculate the first sentence in the window in such a way that the annotation
// currently selected is in the center of the window
sentence = findWindowStartCenteringOnSelection(jCas, sentence,
aBModel.getSelection().getBegin(), aBModel.getProject(), aBModel.getDocument(),
aBModel.getPreferences().getWindowSize());
// Move to it
aBModel.setFirstVisibleSentence(sentence);
}
}
@SuppressWarnings("unchecked")
public void setSlot(AjaxRequestTarget aTarget, JCas aJCas, final ActionContext aBModel,
int aAnnotationId)
{
// Set an armed slot
if (!bModel.getSelection().isRelationAnno() && aBModel.isSlotArmed()) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) getFeatureModel(aBModel
.getArmedFeature()).value;
LinkWithRoleModel link = links.get(aBModel.getArmedSlot());
link.targetAddr = aAnnotationId;
link.label = selectByAddr(aJCas, aAnnotationId).getCoveredText();
aBModel.clearArmedSlot();
}
// Auto-commit if working on existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
try {
actionAnnotate(aTarget, bModel, aJCas, false);
}
catch (BratAnnotationException e) {
error(e.getMessage());
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
}
private void arcSelected(AjaxRequestTarget aTarget, JCas aJCas)
throws BratAnnotationException
{
// FIXME REC I think this whole section which meddles around with the selected annotation
// layer should be moved out of there to the place where we originally set the annotation
// layer...!
long layerId = TypeUtil.getLayerId(bModel.getSelection().getOriginType());
AnnotationLayer spanLayer = annotationService.getLayer(layerId);
if (
bModel.getPreferences().isRememberLayer() &&
bModel.getSelection().isAnnotate() &&
!spanLayer.equals(bModel.getDefaultAnnotationLayer()))
{
throw new BratAnnotationException("No relation annotation allowed on the "
+ "selected span layer");
}
// If we are creating a relation annotation, we have to set the current layer depending
// on the type of relation that is permitted between the source/target span. This is
// necessary because we have no separate UI control to set the relation annotation type.
// It is possible because currently only a single relation layer is allowed to attach to
// any given span layer.
if (bModel.getSelection().isAnnotate())
{
// If we drag an arc between POS annotations, then the relation must be a dependency
// relation.
// FIXME - Actually this case should be covered by the last case - the database lookup!
if (
spanLayer.isBuiltIn() &&
spanLayer.getName().equals(POS.class.getName()))
{
AnnotationLayer depLayer = annotationService.getLayer(Dependency.class.getName(),
bModel.getProject());
if (bModel.getAnnotationLayers().contains(depLayer)) {
bModel.setSelectedAnnotationLayer(depLayer);
}
else {
bModel.setSelectedAnnotationLayer(null);
}
}
// If we drag an arc in a chain layer, then the arc is of the same layer as the span
// Chain layers consist of arcs and spans
else if (spanLayer.getType().equals(WebAnnoConst.CHAIN_TYPE)) {
// one layer both for the span and arc annotation
bModel.setSelectedAnnotationLayer(spanLayer);
}
// Otherwise, look up the possible relation layer(s) in the database.
else {
for (AnnotationLayer layer : annotationService.listAnnotationLayer(bModel
.getProject())) {
if (layer.getAttachType() != null && layer.getAttachType().equals(spanLayer)) {
if (bModel.getAnnotationLayers().contains(layer)) {
bModel.setSelectedAnnotationLayer(layer);
}
else {
bModel.setSelectedAnnotationLayer(null);
}
break;
}
}
}
}
// Populate feature value from existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
AnnotationFS annoFs = selectByAddr(aJCas, bModel.getSelection().getAnnotation()
.getId());
// Try obtaining the layer from the feature structure
AnnotationLayer layer;
try {
layer = TypeUtil.getLayer(annotationService, bModel.getProject(), annoFs);
}
catch (NoResultException e) {
clearFeatures(aTarget);
throw new IllegalStateException("Unknown layer [" + annoFs.getType().getName() + "]", e);
}
populateFeatures(layer, annoFs, null);
}
// Avoid creation of arcs on locked layers
else if (bModel.getSelectedAnnotationLayer() != null
&& bModel.getSelectedAnnotationLayer().isReadonly()) {
bModel.setSelectedAnnotationLayer(new AnnotationLayer());
}
else {
populateFeatures(bModel.getSelectedAnnotationLayer(), null,
bModel.getRememberedArcFeatures());
}
bModel.setDefaultAnnotationLayer(spanLayer);
}
private void spanSelected(AjaxRequestTarget aTarget, JCas aJCas)
{
// Selecting an existing span annotation
if (bModel.getSelection().getAnnotation().isSet()) {
AnnotationFS annoFs = selectByAddr(aJCas, bModel.getSelection().getAnnotation()
.getId());
// Try obtaining the layer from the feature structure
AnnotationLayer layer;
try {
layer = TypeUtil.getLayer(annotationService, bModel.getProject(), annoFs);
}
catch (NoResultException e) {
clearFeatures(aTarget);
throw new IllegalStateException("Unknown layer [" + annoFs.getType().getName() + "]", e);
}
// If remember layer is off, then the current layer follows the selected annotations
if (!bModel.getPreferences().isRememberLayer()) {
bModel.setSelectedAnnotationLayer(layer);
}
// populate feature value
populateFeatures(layer, annoFs, null);
}
else {
populateFeatures(bModel.getSelectedAnnotationLayer(), null,
bModel.getRememberedSpanFeatures());
}
}
protected void onChange(AjaxRequestTarget aTarget, ActionContext aBModel)
{
        // Overridden in CurationPanel
}
protected void onAutoForward(AjaxRequestTarget aTarget, ActionContext aBModel)
{
        // Overridden in CurationPanel
}
protected void onAnnotate(AjaxRequestTarget aTarget, ActionContext aModel)
{
        // Overridden in AutomationPage
}
protected void onDelete(AjaxRequestTarget aTarget, ActionContext aModel, AnnotationFS aFs)
{
        // Overridden in AutomationPage
}
public void refreshAnnotationLayers(ActionContext aBModel)
{
updateLayersDropdown(aBModel);
if (annotationLayers.size() == 0) {
aBModel.setSelectedAnnotationLayer(new AnnotationLayer());
}
else if (aBModel.getSelectedAnnotationLayer() == null) {
if (aBModel.getRememberedSpanLayer() == null) {
aBModel.setSelectedAnnotationLayer(annotationLayers.get(0));
}
else {
aBModel.setSelectedAnnotationLayer(aBModel.getRememberedSpanLayer());
}
}
clearFeatures(null);
updateRememberLayer();
}
private void updateLayersDropdown(ActionContext aBModel)
{
annotationLayers.clear();
AnnotationLayer l = null;
for (AnnotationLayer layer : aBModel.getAnnotationLayers()) {
if (!layer.isEnabled() || layer.isReadonly()
|| layer.getName().equals(Token.class.getName())) {
continue;
}
if (layer.getType().equals(WebAnnoConst.SPAN_TYPE)) {
annotationLayers.add(layer);
l = layer;
}
// manage chain type
else if (layer.getType().equals(WebAnnoConst.CHAIN_TYPE)) {
for (AnnotationFeature feature : annotationService.listAnnotationFeature(layer)) {
if (!feature.isEnabled()) {
continue;
}
if (feature.getName().equals(WebAnnoConst.COREFERENCE_TYPE_FEATURE)) {
annotationLayers.add(layer);
}
}
}
// chain
}
if (bModel.getDefaultAnnotationLayer() != null) {
bModel.setSelectedAnnotationLayer(bModel.getDefaultAnnotationLayer());
}
else if (l != null) {
bModel.setSelectedAnnotationLayer(l);
}
}
public class FeatureEditorPanelContent
extends RefreshingView<FeatureModel>
{
private static final long serialVersionUID = -8359786805333207043L;
public FeatureEditorPanelContent(String aId)
{
super(aId);
setOutputMarkupId(true);
}
@SuppressWarnings("rawtypes")
@Override
protected void populateItem(final Item<FeatureModel> item)
{
// Feature editors that allow multiple values may want to update themselves,
// e.g. to add another slot.
item.setOutputMarkupId(true);
final FeatureModel fm = item.getModelObject();
final FeatureEditor frag;
switch (fm.feature.getMultiValueMode()) {
case NONE: {
switch (fm.feature.getType()) {
case CAS.TYPE_NAME_INTEGER: {
frag = new NumberFeatureEditor("editor", "numberFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_FLOAT: {
frag = new NumberFeatureEditor("editor", "numberFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_BOOLEAN: {
frag = new BooleanFeatureEditor("editor", "booleanFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_STRING: {
frag = new TextFeatureEditor("editor", "textFeatureEditor", item, fm);
break;
}
default:
throw new IllegalArgumentException("Unsupported type [" + fm.feature.getType()
+ "] on feature [" + fm.feature.getName() + "]");
}
break;
}
case ARRAY: {
switch (fm.feature.getLinkMode()) {
case WITH_ROLE: {
// If it is none of the primitive types, it must be a link feature
frag = new LinkFeatureEditor("editor", "linkFeatureEditor", item, fm);
break;
}
default:
throw new IllegalArgumentException("Unsupported link mode ["
+ fm.feature.getLinkMode() + "] on feature [" + fm.feature.getName()
+ "]");
}
break;
}
default:
throw new IllegalArgumentException("Unsupported multi-value mode ["
+ fm.feature.getMultiValueMode() + "] on feature [" + fm.feature.getName()
+ "]");
}
// We need to enable the markup ID here because we use it during the AJAX behavior that
// automatically saves feature editors on change/blur. Check addAnnotateActionBehavior.
frag.setOutputMarkupId(true);
item.add(frag);
if (!fm.feature.getLayer().isReadonly()) {
// whenever it is updating an annotation, it updates automatically when a component
// for the feature lost focus - but updating is for every component edited
// LinkFeatureEditors must be excluded because the auto-update will break the
// ability to add slots. Adding a slot is NOT an annotation action.
// TODO annotate every time except when position is at (0,0)
if (bModel.getSelection().getAnnotation().isSet()
&& !(frag instanceof LinkFeatureEditor)) {
if (frag.isDropOrchoice()) {
addAnnotateActionBehavior(frag, "change");
}
else {
addAnnotateActionBehavior(frag, "blur");
}
}
else if (!(frag instanceof LinkFeatureEditor)) {
if (frag.isDropOrchoice()) {
storeFeatureValue(frag, "change");
}
else {
storeFeatureValue(frag, "blur");
}
}
// Put focus on hidden input field if we are in forward-mode
if (bModel.isForwardAnnotation()) {
forwardAnnotationText.add(new DefaultFocusBehavior2());
}
// Put focus on first component if we select an existing annotation or create a
// new one
else if (
item.getIndex() == 0 &&
SpanAnnotationResponse.is(bModel.getUserAction())
) {
frag.getFocusComponent().add(new DefaultFocusBehavior());
}
// Restore/preserve focus when tabbing through the feature editors
else if (bModel.getUserAction() == null) {
AjaxRequestTarget target = RequestCycle.get().find(AjaxRequestTarget.class);
if (target != null && frag.getFocusComponent().getMarkupId()
.equals(target.getLastFocusedElementId())) {
target.focusComponent(frag.getFocusComponent());
}
}
// Add tooltip on label
StringBuilder tooltipTitle = new StringBuilder();
tooltipTitle.append(fm.feature.getUiName());
if (fm.feature.getTagset() != null) {
tooltipTitle.append(" (");
tooltipTitle.append(fm.feature.getTagset().getName());
tooltipTitle.append(')');
}
Component labelComponent = frag.getLabelComponent();
labelComponent.add(new AttributeAppender("style", "cursor: help", ";"));
labelComponent.add(new DescriptionTooltipBehavior(tooltipTitle.toString(),
fm.feature.getDescription()));
}
else {
frag.getFocusComponent().setEnabled(false);
}
}
private void storeFeatureValue(final FeatureEditor aFrag, String aEvent)
{
aFrag.getFocusComponent().add(new AjaxFormComponentUpdatingBehavior(aEvent)
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
aTarget.add(annotationFeatureForm);
}
});
}
private void addAnnotateActionBehavior(final FeatureEditor aFrag, String aEvent)
{
aFrag.getFocusComponent().add(new AjaxFormComponentUpdatingBehavior(aEvent)
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void updateAjaxAttributes(AjaxRequestAttributes aAttributes)
{
super.updateAjaxAttributes(aAttributes);
// When focus is on a feature editor and the user selects a new annotation,
                    // there is a race condition between saving the value of the feature editor
// and the loading of the new annotation. Delay the feature editor save to give
// preference to loading the new annotation.
aAttributes.setThrottlingSettings(
new ThrottlingSettings(getMarkupId(), Duration.milliseconds(250), true));
aAttributes.getAjaxCallListeners().add(new AjaxCallListener()
{
private static final long serialVersionUID = 1L;
@Override
public CharSequence getPrecondition(Component aComponent)
{
// If the panel refreshes because the user selects
// a new annotation, the annotation editor panel is updated for the
// new annotation first (before saving values) because of the delay
// set above. When the delay is over, we can no longer save the value
// because the old component is no longer there. We use the markup id
// of the editor fragments to check if the old component is still there
// (i.e. if the user has just tabbed to a new field) or if the old
// component is gone (i.e. the user selected/created another annotation).
// If the old component is no longer there, we abort the delayed save
// action.
return "return $('#"+aFrag.getMarkupId()+"').length > 0;";
}
});
}
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
try {
if (bModel.getConstraints() != null) {
// Make sure we update the feature editor panel because due to
// constraints the contents may have to be re-rendered
aTarget.add(annotationFeatureForm);
}
actionAnnotate(aTarget, bModel, false);
}
catch (BratAnnotationException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
});
}
@Override
protected Iterator<IModel<FeatureModel>> getItemModels()
{
ModelIteratorAdapter<FeatureModel> i = new ModelIteratorAdapter<FeatureModel>(
featureModels)
{
@Override
protected IModel<FeatureModel> model(FeatureModel aObject)
{
return Model.of(aObject);
}
};
return i;
}
}
public static abstract class FeatureEditor
extends Fragment
{
private static final long serialVersionUID = -7275181609671919722L;
protected static final String ID_PREFIX = "featureEditorHead";
public FeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aMarkupProvider,
IModel<?> aModel)
{
super(aId, aMarkupId, aMarkupProvider, aModel);
}
public Component getLabelComponent()
{
return get("feature");
}
abstract public Component getFocusComponent();
abstract public boolean isDropOrchoice();
}
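    // Hypothetical sketch (illustration only - "MyFeatureEditor" is not part of this class
    // hierarchy): a minimal concrete editor only needs to expose its focus component and
    // report whether it behaves like a dropdown/choice.
    //
    //   public static class MyFeatureEditor extends FeatureEditor {
    //       private final TextField<String> field;
    //
    //       public MyFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
    //               FeatureModel aModel) {
    //           super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
    //           add(new Label("feature", aModel.feature.getUiName()));
    //           field = new TextField<String>("value");
    //           add(field);
    //       }
    //
    //       @Override public Component getFocusComponent() { return field; }
    //       @Override public boolean isDropOrchoice() { return false; }
    //   }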
public static class NumberFeatureEditor<T extends Number>
extends FeatureEditor
{
private static final long serialVersionUID = -2426303638953208057L;
@SuppressWarnings("rawtypes")
private final NumberTextField field;
public NumberFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
add(new Label("feature", aModel.feature.getUiName()));
switch (aModel.feature.getType()) {
case CAS.TYPE_NAME_INTEGER: {
field = new NumberTextField<Integer>("value", Integer.class);
break;
}
case CAS.TYPE_NAME_FLOAT: {
field = new NumberTextField<Float>("value", Float.class);
                    // NOTE: the field is added exactly once below (after the switch); a second
                    // add() here would register a duplicate "value" child on this editor
break;
}
default:
throw new IllegalArgumentException("Type [" + aModel.feature.getType()
+ "] cannot be rendered as a numeric input field");
}
// Ensure that markup IDs of feature editor focus components remain constant across
            // refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
}
@SuppressWarnings("rawtypes")
@Override
public NumberTextField getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return false;
}
};
public static class BooleanFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 5104979547245171152L;
private final CheckBox field;
public BooleanFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
add(new Label("feature", aModel.feature.getUiName()));
field = new CheckBox("value");
// Ensure that markup IDs of feature editor focus components remain constant across
            // refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
}
@Override
public Component getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return true;
}
};
public class TextFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 7763348613632105600L;
@SuppressWarnings("rawtypes")
private final AbstractTextComponent field;
private boolean isDrop;
//For showing the status of Constraints rules kicking in.
private RulesIndicator indicator = new RulesIndicator();
private boolean hideUnconstraintFeature;
/**
* Hides feature if "Hide un-constraint feature" is enabled
* and constraint rules are applied and feature doesn't match any constraint rule
*/
@Override
public boolean isVisible() {
if (hideUnconstraintFeature) {
//if enabled and constraints rule execution returns anything other than green
if (indicator.isAffected() && !indicator.getStatusColor().equals("green")) {
return false;
}
}
return true;
}
public TextFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
//Checks whether hide un-constraint feature is enabled or not
hideUnconstraintFeature = aModel.feature.isHideUnconstraintFeature();
add(new Label("feature", aModel.feature.getUiName()));
indicator.reset(); //reset the indicator
if (aModel.feature.getTagset() != null) {
List<Tag> tagset = null;
ActionContext model = bModel;
// verification to check whether constraints exist for this project or NOT
if (model.getConstraints() != null && model.getSelection().getAnnotation().isSet()) {
// indicator.setRulesExist(true);
tagset = populateTagsBasedOnRules(model, aModel);
}
else {
// indicator.setRulesExist(false);
// Earlier behavior,
tagset = annotationService.listTags(aModel.feature.getTagset());
}
field = new StyledComboBox<Tag>("value", tagset) {
private static final long serialVersionUID = -1735694425658462932L;
@Override
protected void onInitialize()
{
// Ensure proper order of the initializing JS header items: first combo box
// behavior (in super.onInitialize()), then tooltip.
Options options = new Options(DescriptionTooltipBehavior.makeTooltipOptions());
options.set("content", functionForTooltip);
add(new TooltipBehavior("#"+field.getMarkupId()+"_listbox *[title]", options) {
private static final long serialVersionUID = 1854141593969780149L;
@Override
protected String $()
{
// REC: It takes a moment for the KendoDatasource to load the data and
// for the Combobox to render the hidden dropdown. I did not find
// a way to hook into this process and to get notified when the
// data is available in the dropdown, so trying to handle this
                            // with a slight delay, hoping that all is set up after 1 second.
return "try {setTimeout(function () { " + super.$() + " }, 1000); } catch (err) {}; ";
}
});
super.onInitialize();
}
};
isDrop = true;
}
else {
field = new TextField<String>("value");
}
// Ensure that markup IDs of feature editor focus components remain constant across
            // refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
//Shows whether constraints are triggered or not
//also shows state of constraints use.
Component constraintsInUseIndicator = new WebMarkupContainer("textIndicator"){
private static final long serialVersionUID = 4346767114287766710L;
@Override
public boolean isVisible()
{
return indicator.isAffected();
}
}.add(new AttributeAppender("class", new Model<String>(){
private static final long serialVersionUID = -7683195283137223296L;
@Override
public String getObject()
{
//adds symbol to indicator
return indicator.getStatusSymbol();
}
}))
.add(new AttributeAppender("style", new Model<String>(){
private static final long serialVersionUID = -5255873539738210137L;
@Override
public String getObject()
{
//adds color to indicator
return "; color: " + indicator.getStatusColor();
}
}));
add(constraintsInUseIndicator);
}
/**
* Adds and sorts tags based on Constraints rules
*/
private List<Tag> populateTagsBasedOnRules(ActionContext model, FeatureModel aModel)
{
// Add values from rules
String restrictionFeaturePath;
switch (aModel.feature.getLinkMode()) {
case WITH_ROLE:
restrictionFeaturePath = aModel.feature.getName() + "."
+ aModel.feature.getLinkTypeRoleFeatureName();
break;
case NONE:
restrictionFeaturePath = aModel.feature.getName();
break;
default:
throw new IllegalArgumentException("Unsupported link mode ["
+ aModel.feature.getLinkMode() + "] on feature ["
+ aModel.feature.getName() + "]");
}
List<Tag> valuesFromTagset = annotationService.listTags(aModel.feature.getTagset());
try {
JCas jCas = getCas(model);
FeatureStructure featureStructure = selectByAddr(jCas, model.getSelection()
.getAnnotation().getId());
Evaluator evaluator = new ValuesGenerator();
//Only show indicator if this feature can be affected by Constraint rules!
indicator.setAffected(evaluator.isThisAffectedByConstraintRules(featureStructure,
restrictionFeaturePath, model.getConstraints()));
List<PossibleValue> possibleValues;
try {
possibleValues = evaluator.generatePossibleValues(
featureStructure, restrictionFeaturePath, model.getConstraints());
LOG.debug("Possible values for [" + featureStructure.getType().getName() + "] ["
+ restrictionFeaturePath + "]: " + possibleValues);
}
catch (Exception e) {
error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e));
LOG.error("Unable to evaluate constraints: " + e.getMessage(), e);
possibleValues = new ArrayList<>();
}
// only adds tags which are suggested by rules and exist in tagset.
List<Tag> tagset = compareSortAndAdd(possibleValues, valuesFromTagset, indicator);
// add remaining tags
addRemainingTags(tagset, valuesFromTagset);
return tagset;
}
catch (IOException | ClassNotFoundException | UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
return valuesFromTagset;
}
@Override
public Component getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return isDrop;
}
};
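/**
* Feature editor for link features. Shows the list of role/target slots, lets the user arm a
* slot for filling, and offers a role input backed by the feature's tagset (reordered via
* constraint rules) or a plain text field.
*/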
public class LinkFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 7469241620229001983L;
private WebMarkupContainer content;
//For showing the status of Constraints rules kicking in.
private RulesIndicator indicator = new RulesIndicator();
@SuppressWarnings("rawtypes")
private final AbstractTextComponent newRole;
private boolean isDrop;
private boolean hideUnconstraintFeature;
/**
* Hides the feature if "Hide un-constraint feature" is enabled, constraint rules are
* applied, and the feature does not match any constraint rule.
*/
@Override
public boolean isVisible() {
if (hideUnconstraintFeature) {
// if enabled and constraint rule evaluation returns anything other than green
if (indicator.isAffected() && !indicator.getStatusColor().equals("green")) {
return false;
}
}
return true;
}
@SuppressWarnings("unchecked")
public LinkFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
final FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
//Checks whether hide un-constraint feature is enabled or not
hideUnconstraintFeature = aModel.feature.isHideUnconstraintFeature();
add(new Label("feature", aModel.feature.getUiName()));
// Most of the content is inside this container such that we can refresh it independently
// from the rest of the form
content = new WebMarkupContainer("content");
content.setOutputMarkupId(true);
add(content);
content.add(new RefreshingView<LinkWithRoleModel>("slots",
Model.of((List<LinkWithRoleModel>) aModel.value))
{
private static final long serialVersionUID = 5475284956525780698L;
@Override
protected Iterator<IModel<LinkWithRoleModel>> getItemModels()
{
ModelIteratorAdapter<LinkWithRoleModel> i = new ModelIteratorAdapter<LinkWithRoleModel>(
(List<LinkWithRoleModel>) LinkFeatureEditor.this.getModelObject().value)
{
@Override
protected IModel<LinkWithRoleModel> model(LinkWithRoleModel aObject)
{
return Model.of(aObject);
}
};
return i;
}
@Override
protected void populateItem(final Item<LinkWithRoleModel> aItem)
{
aItem.setModel(new CompoundPropertyModel<LinkWithRoleModel>(aItem
.getModelObject()));
Label role = new Label("role");
aItem.add(role);
final Label label;
if (aItem.getModelObject().targetAddr == -1
&& bModel.isArmedSlot(aModel.feature, aItem.getIndex())) {
label = new Label("label", "<Select to fill>");
}
else {
label = new Label("label");
}
label.add(new AjaxEventBehavior("click")
{
private static final long serialVersionUID = 7633309278417475424L;
@Override
protected void onEvent(AjaxRequestTarget aTarget)
{
if (bModel.isArmedSlot(aModel.feature, aItem.getIndex())) {
bModel.clearArmedSlot();
aTarget.add(content);
}
else {
bModel.setArmedSlot(aModel.feature, aItem.getIndex());
// Need to re-render the whole form because a slot in another
// link editor might get unarmed
aTarget.add(annotationFeatureForm);
}
}
});
label.add(new AttributeAppender("style", new Model<String>()
{
private static final long serialVersionUID = 1L;
@Override
public String getObject()
{
ActionContext model = bModel;
if (model.isArmedSlot(aModel.feature, aItem.getIndex())) {
return "; background: orange";
}
else {
return "";
}
}
}));
aItem.add(label);
}
});
if (aModel.feature.getTagset() != null) {
List<Tag> tagset = null;
//reset the indicator
indicator.reset();
if (bModel.getConstraints() != null && bModel.getSelection().getAnnotation().isSet()) {
// indicator.setRulesExist(true); //Constraint rules exist!
tagset = addTagsBasedOnRules(bModel, aModel);
}
else {
// indicator.setRulesExist(false); //No constraint rules.
// add tagsets only, earlier behavior
tagset = annotationService.listTags(aModel.feature.getTagset());
}
newRole = new StyledComboBox<Tag>("newRole", Model.of(""), tagset) {
private static final long serialVersionUID = 1L;
@Override
protected void onInitialize()
{
super.onInitialize();
// Ensure proper order of the initializing JS header items: first combo box
// behavior (in super.onInitialize()), then tooltip.
Options options = new Options(DescriptionTooltipBehavior.makeTooltipOptions());
options.set("content", functionForTooltip);
add(new TooltipBehavior("#"+newRole.getMarkupId()+"_listbox *[title]", options) {
private static final long serialVersionUID = -7207021885475073279L;
@Override
protected String $()
{
// REC: It takes a moment for the KendoDatasource to load the data and
// for the Combobox to render the hidden dropdown. I did not find
// a way to hook into this process and to get notified when the
// data is available in the dropdown, so trying to handle this
// with a slight delay, hoping that all is set up after 1 second.
return "try {setTimeout(function () { " + super.$() + " }, 1000); } catch (err) {}; ";
}
});
}
@Override
protected void onConfigure()
{
super.onConfigure();
if (bModel.isSlotArmed() && aModel.feature.equals(bModel.getArmedFeature())) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
setModelObject(links.get(bModel.getArmedSlot()).role);
}
else {
setModelObject("");
}
}
};
// Ensure that markup IDs of feature editor focus components remain constant across
// refreshes of the feature editor panel. This is required to restore the focus.
newRole.setOutputMarkupId(true);
newRole.setMarkupId(ID_PREFIX + aModel.feature.getId());
content.add(newRole);
isDrop = true;
}
else {
content.add(newRole = new TextField<String>("newRole", Model.of("")) {
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
if (bModel.isSlotArmed() && aModel.feature.equals(bModel.getArmedFeature())) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
setModelObject(links.get(bModel.getArmedSlot()).role);
}
else {
setModelObject("");
}
}
});
}
//Shows whether constraints are triggered or not
//also shows state of constraints use.
Component constraintsInUseIndicator = new WebMarkupContainer("linkIndicator"){
private static final long serialVersionUID = 4346767114287766710L;
@Override
public boolean isVisible()
{
return indicator.isAffected();
}
}.add(new AttributeAppender("class", new Model<String>(){
private static final long serialVersionUID = -7683195283137223296L;
@Override
public String getObject()
{
//adds symbol to indicator
return indicator.getStatusSymbol();
}
}))
.add(new AttributeAppender("style", new Model<String>(){
private static final long serialVersionUID = -5255873539738210137L;
@Override
public String getObject()
{
//adds color to indicator
return "; color: " + indicator.getStatusColor();
}
}));
add(constraintsInUseIndicator);
// Add a new empty slot with the specified role
content.add(new AjaxButton("add")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure(){
ActionContext model = bModel;
setVisible(!(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature())));
// setEnabled(!(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature())));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
if (StringUtils.isBlank((String) newRole.getModelObject())) {
error("Must set slot label before adding!");
aTarget.addChildren(getPage(), FeedbackPanel.class);
}
else {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
LinkWithRoleModel m = new LinkWithRoleModel();
m.role = (String) newRole.getModelObject();
links.add(m);
bModel.setArmedSlot(LinkFeatureEditor.this.getModelObject().feature,
links.size() - 1);
// Need to re-render the whole form because a slot in another
// link editor might get unarmed
aTarget.add(annotationFeatureForm);
}
}
});
// Allows user to update slot
content.add(new AjaxButton("set"){
private static final long serialVersionUID = 7923695373085126646L;
@Override
protected void onConfigure(){
ActionContext model = bModel;
setVisible(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature()));
// setEnabled(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature()));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
ActionContext model = bModel;
//Update the slot
LinkWithRoleModel m = links.get(model.getArmedSlot());
m.role = (String) newRole.getModelObject();
// int index = model.getArmedSlot(); //retain index
// links.remove(model.getArmedSlot());
// model.clearArmedSlot();
// links.add(m);
links.set(model.getArmedSlot(), m); //avoid reordering
aTarget.add(content);
try {
actionAnnotate(aTarget, bModel, false);
}
catch(BratAnnotationException e){
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCause(e),e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(ExceptionUtils.getRootCause(e),e);
}
}
});
// Add a new empty slot with the specified role
content.add(new AjaxButton("del")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
ActionContext model = bModel;
setVisible(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature()));
// setEnabled(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature()));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
ActionContext model = bModel;
links.remove(model.getArmedSlot());
model.clearArmedSlot();
aTarget.add(content);
// Auto-commit if working on existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
try {
actionAnnotate(aTarget, bModel, false);
}
catch (BratAnnotationException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
}
});
}
/**
* Adds tags based on constraint rules and auto-adds tags which are marked as important.
*
* @return list containing tags which exist in the tagset and are also suggested by the rules,
*         followed by the remaining tags in the tagset.
*/
private List<Tag> addTagsBasedOnRules(ActionContext model, final FeatureModel aModel)
{
String restrictionFeaturePath = aModel.feature.getName() + "."
+ aModel.feature.getLinkTypeRoleFeatureName();
List<Tag> valuesFromTagset = annotationService.listTags(aModel.feature.getTagset());
try {
JCas jCas = getCas(model);
FeatureStructure featureStructure = selectByAddr(jCas, model.getSelection()
.getAnnotation().getId());
Evaluator evaluator = new ValuesGenerator();
//Only show indicator if this feature can be affected by Constraint rules!
indicator.setAffected(evaluator.isThisAffectedByConstraintRules(featureStructure,
restrictionFeaturePath, model.getConstraints()));
List<PossibleValue> possibleValues;
try {
possibleValues = evaluator.generatePossibleValues(
featureStructure, restrictionFeaturePath, model.getConstraints());
LOG.debug("Possible values for [" + featureStructure.getType().getName() + "] ["
+ restrictionFeaturePath + "]: " + possibleValues);
}
catch (Exception e) {
error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e));
LOG.error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e), e);
possibleValues = new ArrayList<>();
}
// Only adds tags which are suggested by rules and exist in tagset.
List<Tag> tagset = compareSortAndAdd(possibleValues, valuesFromTagset, indicator);
removeAutomaticallyAddedUnusedEntries();
// Create entries for important tags.
autoAddImportantTags(tagset, possibleValues);
// Add remaining tags.
addRemainingTags(tagset, valuesFromTagset);
return tagset;
}
catch (ClassNotFoundException | UIMAException | IOException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
return valuesFromTagset;
}
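/**
* Removes auto-created slots which have not been filled with a target annotation.
*/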
private void removeAutomaticallyAddedUnusedEntries()
{
// Remove unused (but auto-added) tags.
@SuppressWarnings("unchecked")
List<LinkWithRoleModel> list = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
Iterator<LinkWithRoleModel> existingLinks = list.iterator();
while (existingLinks.hasNext()) {
LinkWithRoleModel link = existingLinks.next();
if (link.autoCreated && link.targetAddr == -1) {
// remove it
existingLinks.remove();
}
}
}
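/**
* Adds empty slots for tags which the constraint rules mark as important, skipping roles that
* are already present. The slots are flagged as auto-created so unused ones can be removed again.
*/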
private void autoAddImportantTags(List<Tag> aTagset, List<PossibleValue> possibleValues)
{
// Construct a quick index for tags
Set<String> tagset = new HashSet<String>();
for (Tag t : aTagset) {
tagset.add(t.getName());
}
// Get links list and build role index
@SuppressWarnings("unchecked")
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
Set<String> roles = new HashSet<String>();
for (LinkWithRoleModel l : links) {
roles.add(l.role);
}
// Loop over values to see which of the tags are important and add them.
for (PossibleValue value : possibleValues) {
if (!value.isImportant() || !tagset.contains(value.getValue())) {
continue;
}
// Check if there is already a slot with the given name
if (roles.contains(value.getValue())) {
continue;
}
// Add empty slot in UI with that name.
LinkWithRoleModel m = new LinkWithRoleModel();
m.role = value.getValue();
// Mark it so that it can be ignored later.
m.autoCreated = true;
links.add(m);
// NOT arming the slot here!
}
}
public void setModelObject(FeatureModel aModel)
{
setDefaultModelObject(aModel);
}
public FeatureModel getModelObject()
{
return (FeatureModel) getDefaultModelObject();
}
@Override
public Component getFocusComponent()
{
return newRole;
}
@Override
public boolean isDropOrchoice()
{
return isDrop;
}
};
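/**
* Clears the feature models and, if an AJAX target is given, refreshes the feature editor form.
*/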
public void clearFeatures(AjaxRequestTarget aTarget)
{
featureModels = new ArrayList<>();
if (aTarget != null) {
aTarget.add(annotationFeatureForm);
}
}
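/**
* Rebuilds the feature models for the given layer, taking values either from the given feature
* structure or from the remembered values; chain layers only contribute their coreference
* relation/type features.
*/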
private void populateFeatures(AnnotationLayer aLayer, FeatureStructure aFS,
Map<AnnotationFeature, Serializable> aRemembered)
{
clearFeatures(null);
// Populate from feature structure
for (AnnotationFeature feature : annotationService.listAnnotationFeature(aLayer)) {
if (!feature.isEnabled()) {
continue;
}
Serializable value = null;
if (aFS != null) {
value = (Serializable) BratAjaxCasUtil.getFeature(aFS, feature);
}
else if (aRemembered != null) {
value = aRemembered.get(feature);
}
if (WebAnnoConst.CHAIN_TYPE.equals(feature.getLayer().getType())) {
if (bModel.getSelection().isRelationAnno()) {
if (feature.getLayer().isLinkedListBehavior()
&& WebAnnoConst.COREFERENCE_RELATION_FEATURE.equals(feature
.getName())) {
featureModels.add(new FeatureModel(feature, value));
}
}
else {
if (WebAnnoConst.COREFERENCE_TYPE_FEATURE.equals(feature.getName())) {
featureModels.add(new FeatureModel(feature, value));
}
}
}
else {
featureModels.add(new FeatureModel(feature, value));
}
}
}
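/**
* Appends all tags from the tagset which are not yet contained in the given list.
*/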
public void addRemainingTags(List<Tag> tagset, List<Tag> valuesFromTagset)
{
// adding the remaining part of tagset.
for (Tag remainingTag : valuesFromTagset) {
if (!tagset.contains(remainingTag)) {
tagset.add(remainingTag);
}
}
}
/*
* Compares the existing tagset with the possible values resulting from rule evaluation.
* Adds only those tags which exist in the tagset and are suggested by the rules. The
* remaining values from the tagset are added afterwards.
*/
private static List<Tag> compareSortAndAdd(List<PossibleValue> possibleValues,
List<Tag> valuesFromTagset, RulesIndicator rulesIndicator)
{
// if there are no possible values, the conditions were not satisfied
if(possibleValues.isEmpty())
{
rulesIndicator.didntMatchAnyRule();
}
List<Tag> returnList = new ArrayList<Tag>();
// Sorting based on important flag
// possibleValues.sort(null);
// Comparing to check which values suggested by rules exists in existing
// tagset and adding them first in list.
for (PossibleValue value : possibleValues) {
for (Tag tag : valuesFromTagset) {
if (value.getValue().equalsIgnoreCase(tag.getName())) {
//Matching values found in tagset and shown in dropdown
rulesIndicator.rulesApplied();
// HACK BEGIN
tag.setReordered(true);
// HACK END
//Avoid duplicate entries
if(!returnList.contains(tag)){
returnList.add(tag);
}
}
}
}
//If no matching tags found
if(returnList.isEmpty()){
rulesIndicator.didntMatchAnyTag();
}
return returnList;
}
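/**
* Drop-down for selecting the annotation layer. Depending on the "remember layer" preference and
* the current selection, changing the layer either just switches the default layer, primes the
* editor for the new type, or opens the delete/replace dialog for the selected annotation.
*/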
public class LayerSelector
extends DropDownChoice<AnnotationLayer>
{
private static final long serialVersionUID = 2233133653137312264L;
public LayerSelector(String aId, List<? extends AnnotationLayer> aChoices)
{
super(aId, aChoices);
setOutputMarkupId(true);
setChoiceRenderer(new ChoiceRenderer<AnnotationLayer>("uiName"));
add(new AjaxFormComponentUpdatingBehavior("change")
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
// If "remember layer" is set, then we really just update the selected layer...
// we do not touch the selected annotation nor the annotation detail panel
if (bModel.getPreferences().isRememberLayer()) {
bModel.setSelectedAnnotationLayer(getModelObject());
}
// If "remember layer" is not set, then changing the layer means that we want
// to change the type of the currently selected annotation
else if (
!bModel.getSelectedAnnotationLayer().equals(getModelObject()) &&
bModel.getSelection().getAnnotation().isSet())
{
if (bModel.getSelection().isRelationAnno()) {
try {
actionClear(aTarget, bModel);
}
catch (UIMAException | ClassNotFoundException | IOException
| BratAnnotationException e) {
error(e.getMessage());
}
}
else {
deleteModal.setContent(new DeleteOrReplaceAnnotationModalPanel(
deleteModal.getContentId(), bModel, deleteModal,
AnnotationDetailEditorPanel.this, getModelObject(), true));
deleteModal
.setWindowClosedCallback(new ModalWindow.WindowClosedCallback()
{
private static final long serialVersionUID = 4364820331676014559L;
@Override
public void onClose(AjaxRequestTarget target)
{
target.add(annotationFeatureForm);
}
});
deleteModal.show(aTarget);
}
}
// If no annotation is selected, then prime the annotation detail panel for the
// new type
else {
bModel.setSelectedAnnotationLayer(getModelObject());
selectedAnnotationLayer.setDefaultModelObject(getModelObject().getUiName());
aTarget.add(selectedAnnotationLayer);
clearFeatures(aTarget);
}
}
});
}
}
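/**
* Returns the feature model corresponding to the given annotation feature, or null if there is none.
*/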
private FeatureModel getFeatureModel(AnnotationFeature aFeature)
{
for (FeatureModel f : featureModels) {
if (f.feature.getId() == aFeature.getId()) {
return f;
}
}
return null;
}
/**
* Represents a link with a role in the UI.
*/
public static class LinkWithRoleModel
implements Serializable
{
private static final long serialVersionUID = 2027345278696308900L;
public static final String CLICK_HINT = "<Click to activate>";
public String role;
public String label = CLICK_HINT;
public int targetAddr = -1;
public boolean autoCreated;
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + ((label == null) ? 0 : label.hashCode());
result = prime * result + ((role == null) ? 0 : role.hashCode());
result = prime * result + targetAddr;
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
LinkWithRoleModel other = (LinkWithRoleModel) obj;
if (label == null) {
if (other.label != null) {
return false;
}
}
else if (!label.equals(other.label)) {
return false;
}
if (role == null) {
if (other.role != null) {
return false;
}
}
else if (!role.equals(other.role)) {
return false;
}
if (targetAddr != other.targetAddr) {
return false;
}
return true;
}
}
private void updateForwardAnnotation(ActionContext aBModel) {
if (aBModel.getSelectedAnnotationLayer() != null
&& !aBModel.getSelectedAnnotationLayer().isLockToTokenOffset()) {
aBModel.setForwardAnnotation(false);// no forwarding for
// sub-/multitoken annotation
} else {
aBModel.setForwardAnnotation(aBModel.isForwardAnnotation());
}
}
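/**
* Pairs an annotation feature with its current value in the editor; multi-valued (array) features
* default to an empty list instead of null.
*/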
public static class FeatureModel
implements Serializable
{
private static final long serialVersionUID = 3512979848975446735L;
public final AnnotationFeature feature;
public Serializable value;
public FeatureModel(AnnotationFeature aFeature, Serializable aValue)
{
feature = aFeature;
value = aValue;
// Avoid having null here because otherwise we have to handle null in zillion places!
if (value == null && MultiValueMode.ARRAY.equals(aFeature.getMultiValueMode())) {
value = new ArrayList<>();
}
}
}
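/**
* Builds the key bindings for forward annotation: each tag of the first feature's tagset is
* mapped to a key sequence consisting of repetitions of its lower-cased first letter.
*/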
private Map<String, String> getBindTags() {
AnnotationFeature f = annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0);
TagSet tagSet = f.getTagset();
Map<Character, String> tagNames = new LinkedHashMap<>();
Map<String, String> bindTag2Key = new LinkedHashMap<>();
for (Tag tag : annotationService.listTags(tagSet)) {
if (tagNames.containsKey(tag.getName().toLowerCase().charAt(0))) {
String oldBinding = tagNames.get(tag.getName().toLowerCase().charAt(0));
String newBinding = oldBinding + tag.getName().toLowerCase().charAt(0);
tagNames.put(tag.getName().toLowerCase().charAt(0), newBinding);
bindTag2Key.put(newBinding, tag.getName());
} else {
tagNames.put(tag.getName().toLowerCase().charAt(0), tag.getName().toLowerCase().substring(0, 1));
bindTag2Key.put(tag.getName().toLowerCase().substring(0, 1), tag.getName());
}
}
return bindTag2Key;
}
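/**
* Resolves the typed key sequence to a tag name from the binding map, re-cycling to the
* single-letter suggestion or falling back to the first binding when there is no direct match.
*/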
private String getKeyBindValue(String aKey, Map<String, String> aBindTags){
// check if all the keys pressed are the same character
// if not, just check a tag for the last char pressed
if(aKey.isEmpty()){
return aBindTags.get(aBindTags.keySet().iterator().next());
}
char prevC = aKey.charAt(0);
for(char ch:aKey.toCharArray()){
if(ch!=prevC){
break;
}
}
if (aBindTags.get(aKey)!=null){
return aBindTags.get(aKey);
}
// re-cycle suggestions
if(aBindTags.containsKey(aKey.substring(0,1))){
selectedTag = aKey.substring(0,1);
return aBindTags.get(aKey.substring(0,1));
}
// set it to the first in the tag list when an arbitrary key is pressed
return aBindTags.get(aBindTags.keySet().iterator().next());
}
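/**
* Clears the current selection and the feature models.
*/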
public void reset(AjaxRequestTarget aTarget)
{
bModel.getSelection().clear();
bModel.getSelection().setBegin(0);
bModel.getSelection().setEnd(0);
clearFeatures(aTarget);
}
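/**
* Re-initializes the detail panel from the current selection (span or arc) and re-renders the
* feature editor form.
*/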
public void refresh(AjaxRequestTarget aTarget)
throws BratAnnotationException
{
try {
if (!bModel.getSelection().isRelationAnno()) {
updateLayersDropdown(bModel);
}
JCas aJCas = getCas(bModel);
if (bModel.getSelection().isRelationAnno()) {
arcSelected(aTarget, aJCas);
}
else {
spanSelected(aTarget, aJCas);
}
updateRememberLayer();
aTarget.add(annotationFeatureForm);
}
catch (BratAnnotationException e) {
throw e;
}
catch (Exception e) {
throw new BratAnnotationException(e);
}
}
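/**
* Keeps the default annotation layer and the displayed layer name in sync with the
* "remember layer" preference.
*/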
private void updateRememberLayer()
{
if (bModel.getPreferences().isRememberLayer()) {
if (bModel.getDefaultAnnotationLayer() == null) {
bModel.setDefaultAnnotationLayer(bModel.getSelectedAnnotationLayer());
}
}
else if (!bModel.getSelection().isRelationAnno()) {
bModel.setDefaultAnnotationLayer(bModel.getSelectedAnnotationLayer());
}
// if no layer is selected in Settings
if (bModel.getSelectedAnnotationLayer() != null) {
selectedAnnotationLayer.setDefaultModelObject(
bModel.getSelectedAnnotationLayer().getUiName());
}
}
/**
* Resets the link (slot) feature values, e.g. when a new annotation is to be created.
*/
public void clearArmedSlotModel()
{
for (FeatureModel fm : featureModels) {
if (StringUtils.isNotBlank(fm.feature.getLinkTypeName())) {
fm.value = new ArrayList<>();
}
}
}
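/**
* Collects all relation annotations from attached relation layers whose source or target points
* to the given annotation, either directly or via an attach feature.
*/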
private Set<AnnotationFS> getAttachedRels(JCas aJCas, AnnotationFS aFs, AnnotationLayer aLayer) throws UIMAException, ClassNotFoundException, IOException{
Set<AnnotationFS> toBeDeleted = new HashSet<AnnotationFS>();
for (AnnotationLayer relationLayer : annotationService
.listAttachedRelationLayers(aLayer)) {
ArcAdapter relationAdapter = (ArcAdapter) getAdapter(annotationService,
relationLayer);
Type relationType = CasUtil.getType(aJCas.getCas(), relationLayer.getName());
Feature sourceFeature = relationType.getFeatureByBaseName(relationAdapter
.getSourceFeatureName());
Feature targetFeature = relationType.getFeatureByBaseName(relationAdapter
.getTargetFeatureName());
// This code is already prepared for the day that relations can go between
// different layers and may have different attach features for the source and
// target layers.
Feature relationSourceAttachFeature = null;
Feature relationTargetAttachFeature = null;
if (relationAdapter.getAttachFeatureName() != null) {
relationSourceAttachFeature = sourceFeature.getRange().getFeatureByBaseName(
relationAdapter.getAttachFeatureName());
relationTargetAttachFeature = targetFeature.getRange().getFeatureByBaseName(
relationAdapter.getAttachFeatureName());
}
for (AnnotationFS relationFS : CasUtil.select(aJCas.getCas(), relationType)) {
// Here we get the annotations that the relation is pointing to in the UI
FeatureStructure sourceFS;
if (relationSourceAttachFeature != null) {
sourceFS = relationFS.getFeatureValue(sourceFeature).getFeatureValue(
relationSourceAttachFeature);
}
else {
sourceFS = relationFS.getFeatureValue(sourceFeature);
}
FeatureStructure targetFS;
if (relationTargetAttachFeature != null) {
targetFS = relationFS.getFeatureValue(targetFeature).getFeatureValue(
relationTargetAttachFeature);
}
else {
targetFS = relationFS.getFeatureValue(targetFeature);
}
if (isSame(sourceFS, aFs) || isSame(targetFS, aFs)) {
toBeDeleted.add(relationFS);
LOG.debug("Deleted relation [" + getAddr(relationFS) + "] from layer ["
+ relationLayer.getName() + "]");
}
}
}
return toBeDeleted;
}
public AnnotationFeatureForm getAnnotationFeatureForm()
{
return annotationFeatureForm;
}
public Label getSelectedAnnotationLayer()
{
return selectedAnnotationLayer;
}
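/**
* Checks whether any of the current feature models no longer belongs to the given layer.
*/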
private boolean isFeatureModelChanged(AnnotationLayer aLayer){
for(FeatureModel fM: featureModels){
if(!annotationService.listAnnotationFeature(aLayer).contains(fM.feature)){
return true;
}
}
return false;
}
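/**
* Forward annotation is only offered for span layers locked to token offsets which have exactly
* one feature backed by a non-empty tagset.
*/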
private boolean isForwardable() {
if (bModel.getSelectedAnnotationLayer() == null) {
return false;
}
if (bModel.getSelectedAnnotationLayer().getId() <= 0) {
return false;
}
if (!bModel.getSelectedAnnotationLayer().getType().equals(WebAnnoConst.SPAN_TYPE)) {
return false;
}
if (!bModel.getSelectedAnnotationLayer().isLockToTokenOffset()) {
return false;
}
// no forward annotation for multifeature layers.
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).size()>1){
return false;
}
// if there are no features at all, no forward annotation
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).isEmpty()){
return false;
}
// we allow forward annotation only for a feature with a tagset
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0).getTagset()==null){
return false;
}
TagSet tagSet = annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0).getTagset();
// there should be at least one tag in the tagset
if(annotationService.listTags(tagSet).size()==0){
return false;
}
return true;
}
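/**
* Builds the user feedback message for a created/updated or deleted annotation.
*/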
private static String generateMessage(AnnotationLayer aLayer, String aLabel, boolean aDeleted)
{
String action = aDeleted ? "deleted" : "created/updated";
String msg = "The [" + aLayer.getUiName() + "] annotation has been " + action + ".";
if (StringUtils.isNotBlank(aLabel)) {
msg += " Label: [" + aLabel + "]";
}
return msg;
}
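/**
* ComboBox which force-removes its stale Kendo datasource header item when re-rendered and uses
* a custom item template to show rule-reordered tags in bold with their description as tooltip.
*/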
class StyledComboBox<T>
extends ComboBox<T>
{
private static final long serialVersionUID = 1L;
public StyledComboBox(String id, IModel<String> model, List<T> choices)
{
super(id, model, choices);
}
public StyledComboBox(String string, List<T> choices)
{
super(string, choices);
}
@Override
protected void onInitialize()
{
super.onInitialize();
add(new Behavior() {
private static final long serialVersionUID = -5674186692106167407L;
@Override
public void renderHead(Component aComponent, IHeaderResponse aResponse)
{
super.renderHead(aComponent, aResponse);
// Force-remove KendoDataSource header item if there already is one. This allows
// Wicket to re-declare the datasource for the callback URL of the new instance
// of this feature editor.
// This causes all the choices to be transferred again, but at least tags added
// to open tagsets appear immediately in the dropdown list and constraints
// apply (hopefully).
// Note: this must be done here instead of before the call to super such that
// first the old datasource declarations are removed and then the new one is
// added and remains in the HTML. Here we rely on the fact that the feature
// editors have a fixed markup ID (which we also rely on for restoring focus).
aResponse.render(new PriorityHeaderItem(JavaScriptHeaderItem.forScript(
"$('head script[id=kendo-datasource_" +
StyledComboBox.this.getMarkupId() + "]').remove();",
null)));
}
});
}
@Override
protected IJQueryTemplate newTemplate()
{
return new IJQueryTemplate()
{
private static final long serialVersionUID = 1L;
/**
* Marks the reordered entries in bold.
* Same as text feature editor.
*/
@Override
public String getText()
{
// Some docs on how the templates work in Kendo, in case we need
// more fancy dropdowns
// http://docs.telerik.com/kendo-ui/framework/templates/overview
StringBuilder sb = new StringBuilder();
sb.append("# if (data.reordered == 'true') { #");
sb.append("<div title=\"#: data.description #\"><b>#: data.name #</b></div>\n");
sb.append("# } else { #");
sb.append("<div title=\"#: data.description #\">#: data.name #</div>\n");
sb.append("# } #");
return sb.toString();
}
@Override
public List<String> getTextProperties()
{
return Arrays.asList("name", "description", "reordered");
}
};
}
}
} | webanno-brat/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/annotation/component/AnnotationDetailEditorPanel.java | /*
* Copyright 2015
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.brat.annotation.component;
import static de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeUtil.getAdapter;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getAddr;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getFeature;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getNextSentenceAddress;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.findWindowStartCenteringOnSelection;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.getSentenceNumber;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.isSame;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.selectAt;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.selectByAddr;
import static de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil.setFeature;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.NoResultException;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.uima.UIMAException;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASRuntimeException;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeatureStructure;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.fit.util.CasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxEventBehavior;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxCallListener;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.attributes.IAjaxCallListener;
import org.apache.wicket.ajax.attributes.ThrottlingSettings;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.form.AjaxFormValidatingBehavior;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.markup.head.PriorityHeaderItem;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.AbstractTextComponent;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.ChoiceRenderer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.RefreshingView;
import org.apache.wicket.markup.repeater.util.ModelIteratorAdapter;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.request.Request;
import org.apache.wicket.request.cycle.RequestCycle;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.apache.wicket.util.time.Duration;
import org.codehaus.plexus.util.StringUtils;
import com.googlecode.wicket.jquery.core.Options;
import com.googlecode.wicket.jquery.core.template.IJQueryTemplate;
import com.googlecode.wicket.jquery.ui.widget.tooltip.TooltipBehavior;
import com.googlecode.wicket.kendo.ui.form.NumberTextField;
import com.googlecode.wicket.kendo.ui.form.TextField;
import com.googlecode.wicket.kendo.ui.form.combobox.ComboBox;
import de.tudarmstadt.ukp.clarin.webanno.api.AnnotationService;
import de.tudarmstadt.ukp.clarin.webanno.api.RepositoryService;
import de.tudarmstadt.ukp.clarin.webanno.api.WebAnnoConst;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.ArcAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.ChainAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.SpanAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeAdapter;
import de.tudarmstadt.ukp.clarin.webanno.brat.adapter.TypeUtil;
import de.tudarmstadt.ukp.clarin.webanno.brat.annotation.action.ActionContext;
import de.tudarmstadt.ukp.clarin.webanno.brat.annotation.action.Selection;
import de.tudarmstadt.ukp.clarin.webanno.brat.exception.BratAnnotationException;
import de.tudarmstadt.ukp.clarin.webanno.brat.message.SpanAnnotationResponse;
import de.tudarmstadt.ukp.clarin.webanno.brat.render.BratAjaxCasUtil;
import de.tudarmstadt.ukp.clarin.webanno.brat.render.model.VID;
import de.tudarmstadt.ukp.clarin.webanno.brat.util.JavascriptUtils;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.Evaluator;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.PossibleValue;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.RulesIndicator;
import de.tudarmstadt.ukp.clarin.webanno.constraints.evaluator.ValuesGenerator;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationFeature;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer;
import de.tudarmstadt.ukp.clarin.webanno.model.Mode;
import de.tudarmstadt.ukp.clarin.webanno.model.MultiValueMode;
import de.tudarmstadt.ukp.clarin.webanno.model.SourceDocumentState;
import de.tudarmstadt.ukp.clarin.webanno.model.Tag;
import de.tudarmstadt.ukp.clarin.webanno.model.TagSet;
import de.tudarmstadt.ukp.clarin.webanno.support.DefaultFocusBehavior;
import de.tudarmstadt.ukp.clarin.webanno.support.DefaultFocusBehavior2;
import de.tudarmstadt.ukp.clarin.webanno.support.DescriptionTooltipBehavior;
import de.tudarmstadt.ukp.dkpro.core.api.lexmorph.type.pos.POS;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
import de.tudarmstadt.ukp.dkpro.core.api.syntax.type.dependency.Dependency;
/**
* Annotation Detail Editor Panel.
*
*/
public class AnnotationDetailEditorPanel
extends Panel
{
private static final long serialVersionUID = 7324241992353693848L;
private static final Log LOG = LogFactory.getLog(AnnotationDetailEditorPanel.class);
@SpringBean(name = "documentRepository")
private RepositoryService repository;
@SpringBean(name = "annotationService")
private AnnotationService annotationService;
private AnnotationFeatureForm annotationFeatureForm;
private Label selectedTextLabel;
private CheckBox forwardAnnotationCheck;
private RefreshingView<FeatureModel> featureValues;
private AjaxButton deleteButton;
private AjaxButton reverseButton;
private LayerSelector layerSelector;
private TextField<String> forwardAnnotationText;
private Label selectedAnnotationLayer;
private ModalWindow deleteModal;
private List<AnnotationLayer> annotationLayers = new ArrayList<AnnotationLayer>();
private List<FeatureModel> featureModels;
private ActionContext bModel;
private String selectedTag = "";
/**
* Function to return tooltip using jquery
* Docs for the JQuery tooltip widget that we configure below:
* https://api.jqueryui.com/tooltip/
*/
private final String functionForTooltip = "function() { return "
+ "'<div class=\"tooltip-title\">'+($(this).text() "
+ "? $(this).text() : 'no title')+'</div>"
+ "<div class=\"tooltip-content tooltip-pre\">'+($(this).attr('title') "
+ "? $(this).attr('title') : 'no description' )+'</div>' }";
public AnnotationDetailEditorPanel(String id, IModel<ActionContext> aModel)
{
super(id, aModel);
bModel = aModel.getObject();
featureModels = new ArrayList<>();
annotationFeatureForm = new AnnotationFeatureForm("annotationFeatureForm",
aModel.getObject())
{
private static final long serialVersionUID = 8081614428845920047L;
@Override
protected void onConfigure()
{
super.onConfigure();
// Disable the whole form when no document is open or the document is already finished
setEnabled(bModel.getDocument() != null && !isAnnotationFinished());
}
};
annotationFeatureForm.setOutputMarkupId(true);
annotationFeatureForm.add(new AjaxFormValidatingBehavior(annotationFeatureForm, "submit") {
private static final long serialVersionUID = -5642108496844056023L;
@Override
protected void onSubmit(AjaxRequestTarget aTarget) {
try {
actionAnnotate(aTarget, bModel, false);
} catch (UIMAException | ClassNotFoundException | IOException | BratAnnotationException e) {
error(e.getMessage());
}
}
});
add(annotationFeatureForm);
}
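/**
* Checks whether the current document is finished: the curation state in curation mode,
* otherwise the state of the user's annotation document.
*/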
public boolean isAnnotationFinished()
{
if (bModel.getMode().equals(Mode.CURATION)) {
return bModel.getDocument().getState().equals(SourceDocumentState.CURATION_FINISHED);
}
else {
return repository.getAnnotationDocument(bModel.getDocument(), bModel.getUser())
.getState().equals(AnnotationDocumentState.FINISHED);
}
}
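/**
* Form holding the feature editors, the forward-annotation controls, the layer selector and the
* annotation action buttons.
*/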
private class AnnotationFeatureForm
extends Form<ActionContext>
{
private static final long serialVersionUID = 3635145598405490893L;
private WebMarkupContainer featureEditorsContainer;
public AnnotationFeatureForm(String id, ActionContext aBModel)
{
super(id, new CompoundPropertyModel<ActionContext>(aBModel));
add(forwardAnnotationCheck = new CheckBox("forwardAnnotation")
{
private static final long serialVersionUID = 8908304272310098353L;
@Override
protected void onConfigure()
{
super.onConfigure();
setEnabled(isForwardable());
updateForwardAnnotation(bModel);
}
});
forwardAnnotationCheck.add(new AjaxFormComponentUpdatingBehavior("change")
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
updateForwardAnnotation(getModelObject());
if(bModel.isForwardAnnotation()){
aTarget.appendJavaScript(JavascriptUtils.getFocusScript(forwardAnnotationText));
selectedTag = "";
}
}
});
forwardAnnotationCheck.setOutputMarkupId(true);
add(new Label("noAnnotationWarning", "No annotation selected!"){
private static final long serialVersionUID = -6046409838139863541L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(!bModel.getSelection().getAnnotation().isSet());
}
});
add(deleteButton = new AjaxButton("delete")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
// Avoid deleting in read-only layers
setEnabled(bModel.getSelectedAnnotationLayer() != null
&& !bModel.getSelectedAnnotationLayer().isReadonly());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
try {
JCas jCas = getCas(bModel);
AnnotationFS fs = selectByAddr(jCas, bModel.getSelection().getAnnotation().getId());
AnnotationLayer layer = bModel.getSelectedAnnotationLayer();
TypeAdapter adapter = getAdapter(annotationService, layer);
if (adapter instanceof SpanAdapter && getAttachedRels(jCas, fs, layer).size() > 0) {
deleteModal.setTitle("Are you sure you would like to delete all relations attached to this span annotation?");
deleteModal.setContent(new DeleteOrReplaceAnnotationModalPanel(
deleteModal.getContentId(), bModel, deleteModal,
AnnotationDetailEditorPanel.this,
bModel.getSelectedAnnotationLayer(), false));
deleteModal.show(aTarget);
}
else {
actionDelete(aTarget, bModel);
}
}
catch (UIMAException | ClassNotFoundException | IOException
| CASRuntimeException | BratAnnotationException e) {
error(e.getMessage());
}
}
});
add(reverseButton = new AjaxButton("reverse")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().isRelationAnno()
&& bModel.getSelection().getAnnotation().isSet()
&& bModel.getSelectedAnnotationLayer().getType()
.equals(WebAnnoConst.RELATION_TYPE));
// Avoid reversing in read-only layers
setEnabled(bModel.getSelectedAnnotationLayer() != null
&& !bModel.getSelectedAnnotationLayer().isReadonly());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
aTarget.addChildren(getPage(), FeedbackPanel.class);
try {
actionReverse(aTarget, bModel);
}
catch (BratAnnotationException e) {
aTarget.prependJavaScript("alert('" + e.getMessage() + "')");
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(e.getMessage(), e);
}
}
});
reverseButton.setOutputMarkupPlaceholderTag(true);
add(new AjaxButton("clear")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
}
@Override
public void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
aTarget.addChildren(getPage(), FeedbackPanel.class);
try {
actionClear(aTarget, bModel);
}
catch (UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(e.getMessage(), e);
}
}
});
add(layerSelector = new LayerSelector("defaultAnnotationLayer", annotationLayers));
featureEditorsContainer = new WebMarkupContainer("featureEditorsContainer")
{
private static final long serialVersionUID = 8908304272310098353L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getSelection().getAnnotation().isSet());
}
};
// Add placeholder since wmc might start out invisible. Without the placeholder we
// cannot make it visible in an AJAX call
featureEditorsContainer.setOutputMarkupPlaceholderTag(true);
featureEditorsContainer.setOutputMarkupId(true);
featureEditorsContainer.add(new Label("noFeaturesWarning", "No features available!") {
private static final long serialVersionUID = 4398704672665066763L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(featureModels.isEmpty());
}
});
featureValues = new FeatureEditorPanelContent("featureValues");
featureEditorsContainer.add(featureValues);
forwardAnnotationText = new TextField<String>("forwardAnno");
forwardAnnotationText.setOutputMarkupId(true);
forwardAnnotationText.add(new AjaxFormComponentUpdatingBehavior("keyup") {
private static final long serialVersionUID = 4554834769861958396L;
@Override
protected void updateAjaxAttributes(AjaxRequestAttributes attributes) {
super.updateAjaxAttributes(attributes);
IAjaxCallListener listener = new AjaxCallListener(){
private static final long serialVersionUID = -7968540662654079601L;
@Override
public CharSequence getPrecondition(Component component) {
return "var keycode = Wicket.Event.keyCode(attrs.event);" +
" return true;" ;
}
};
attributes.getAjaxCallListeners().add(listener);
attributes.getDynamicExtraParameters()
.add("var eventKeycode = Wicket.Event.keyCode(attrs.event);" +
"return {keycode: eventKeycode};");
attributes.setAllowDefault(true);
}
@Override
protected void onUpdate(AjaxRequestTarget aTarget) {
final Request request = RequestCycle.get().getRequest();
final String jsKeycode = request.getRequestParameters()
.getParameterValue("keycode").toString("");
if (jsKeycode.equals("32")){
try {
actionAnnotate(aTarget, aBModel, false);
selectedTag ="";
} catch (UIMAException | ClassNotFoundException | IOException | BratAnnotationException e) {
error(e);
}
return;
}
if (jsKeycode.equals("13")){
selectedTag ="";
return;
}
selectedTag = (forwardAnnotationText.getModelObject() == null ? ""
: forwardAnnotationText.getModelObject().charAt(0)) + selectedTag;
Map<String, String> bindTags = getBindTags();
if (!bindTags.isEmpty()) {
featureModels.get(0).value = getKeyBindValue(selectedTag, bindTags);
}
aTarget.add(forwardAnnotationText);
aTarget.add(featureValues.get(0));
}
});
forwardAnnotationText.setOutputMarkupId(true);
forwardAnnotationText.add(new AttributeAppender("style", "opacity:0", ";"));
// forwardAnno.add(new AttributeAppender("style", "filter:alpha(opacity=0)", ";"));
add(forwardAnnotationText);
// the selected text for annotation
selectedTextLabel = new Label("selectedText", PropertyModel.of(getModelObject(),
"selection.text"));
selectedTextLabel.setOutputMarkupId(true);
featureEditorsContainer.add(selectedTextLabel);
featureEditorsContainer.add(new Label("layerName","Layer"){
private static final long serialVersionUID = 6084341323607243784L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getPreferences().isRememberLayer());
}
});
featureEditorsContainer.setOutputMarkupId(true);
// the annotation layer for the selected annotation
selectedAnnotationLayer = new Label("selectedAnnotationLayer", new Model<String>())
{
private static final long serialVersionUID = 4059460390544343324L;
@Override
protected void onConfigure()
{
super.onConfigure();
setVisible(bModel.getPreferences().isRememberLayer());
}
};
selectedAnnotationLayer.setOutputMarkupId(true);
featureEditorsContainer.add(selectedAnnotationLayer);
add(featureEditorsContainer);
add(deleteModal = new ModalWindow("yesNoModal"));
deleteModal.setOutputMarkupId(true);
deleteModal.setInitialWidth(600);
deleteModal.setInitialHeight(50);
deleteModal.setResizable(true);
deleteModal.setWidthUnit("px");
deleteModal.setHeightUnit("px");
deleteModal.setTitle("Are you sure you want to delete the existing annotation?");
}
}
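/**
* Creates or updates the annotation for the current selection and persists the feature values.
*/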
public void actionAnnotate(AjaxRequestTarget aTarget, ActionContext aBModel, boolean aIsForwarded)
throws UIMAException, ClassNotFoundException, IOException, BratAnnotationException
{
if (isAnnotationFinished()) {
throw new BratAnnotationException(
"This document is already closed. Please ask your project manager to re-open it via the Monitoring page");
}
// If there is no annotation yet, create one. During creation, the adapter
// may notice that it would create a duplicate and return the address of
// an existing annotation instead of a new one.
JCas jCas = getCas(aBModel);
actionAnnotate(aTarget, aBModel, jCas, aIsForwarded);
}
public void actionAnnotate(AjaxRequestTarget aTarget, ActionContext aBModel, JCas jCas, boolean aIsForwarded)
throws UIMAException, ClassNotFoundException, IOException, BratAnnotationException
{
if (aBModel.getSelectedAnnotationLayer() == null) {
error("No layer is selected. First select a layer.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
if (aBModel.getSelectedAnnotationLayer().isReadonly()) {
error("Layer is not editable.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
// Verify if input is valid according to tagset
for (int i = 0; i < featureModels.size(); i++) {
AnnotationFeature feature = featureModels.get(i).feature;
if (CAS.TYPE_NAME_STRING.equals(feature.getType())) {
String value = (String) featureModels.get(i).value;
// Check if tag is necessary, set, and correct
if (
value != null &&
feature.getTagset() != null &&
!feature.getTagset().isCreateTag() &&
!annotationService.existsTag(value, feature.getTagset())
) {
error("[" + value
+ "] is not in the tag list. Please choose from the existing tags");
return;
}
}
}
// #186 - After filling a slot, the annotation detail panel is not updated
aTarget.add(annotationFeatureForm);
TypeAdapter adapter = getAdapter(annotationService, aBModel.getSelectedAnnotationLayer());
Selection selection = aBModel.getSelection();
if (selection.getAnnotation().isNotSet()) {
if (bModel.getSelection().isRelationAnno()) {
AnnotationFS originFs = selectByAddr(jCas, selection.getOrigin());
AnnotationFS targetFs = selectByAddr(jCas, selection.getTarget());
if (adapter instanceof SpanAdapter) {
error("Layer does not support arc annotation.");
aTarget.addChildren(getPage(), FeedbackPanel.class);
return;
}
if (adapter instanceof ArcAdapter) {
AnnotationFS arc = ((ArcAdapter) adapter).add(originFs, targetFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), null, null);
selection.setAnnotation(new VID(getAddr(arc)));
if (selection.getAnnotation().isSet()) {
selection.setText("[" + originFs.getCoveredText() + "] - [" +
targetFs.getCoveredText() + "]");
}
else {
selection.setText("");
}
} else {
selection.setAnnotation(
new VID(((ChainAdapter) adapter).addArc(jCas, originFs, targetFs, null, null)));
if (selection.getAnnotation().isSet()) {
selection.setText(originFs.getCoveredText());
}
else {
selection.setText("");
}
}
selection.setBegin(originFs.getBegin());
} else if (adapter instanceof SpanAdapter) {
for (FeatureModel fm : featureModels) {
Serializable spanValue = ((SpanAdapter) adapter).getSpan(jCas, selection.getBegin(),
selection.getEnd(), fm.feature, null);
if (spanValue != null) {
// allow modification for forward annotation
if (aBModel.isForwardAnnotation()) {
fm.value = spanValue;
featureModels.get(0).value = spanValue;
selectedTag =
getBindTags().entrySet().stream().filter(e -> e.getValue().equals(spanValue))
.map(Map.Entry::getKey).findFirst().orElse(null);
} else {
actionClear(aTarget, bModel);
throw new BratAnnotationException("Cannot create another annotation of layer ["
+ bModel.getSelectedAnnotationLayer().getUiName() + "] at this"
+ " location - stacking is not enabled for this layer.");
}
}
}
Integer annoId = ((SpanAdapter) adapter).add(jCas, selection.getBegin(), selection.getEnd(), null, null);
selection.setAnnotation(new VID(annoId));
AnnotationFS annoFs = BratAjaxCasUtil.selectByAddr(jCas, annoId);
selection.set(jCas, annoFs.getBegin(), annoFs.getEnd());
} else {
for (FeatureModel fm : featureModels) {
Serializable spanValue = ((ChainAdapter) adapter).getSpan(jCas, selection.getBegin(),
selection.getEnd(), fm.feature, null);
if (spanValue != null) {
// allow modification for forward annotation
if (aBModel.isForwardAnnotation()) {
fm.value = spanValue;
featureModels.get(0).value = spanValue;
selectedTag =
getBindTags().entrySet().stream().filter(e -> e.getValue().equals(spanValue))
.map(Map.Entry::getKey).findFirst().orElse(null);
}
}
}
selection.setAnnotation(new VID(((ChainAdapter) adapter).addSpan(
jCas, selection.getBegin(), selection.getEnd(), null, null)));
selection.setText(jCas.getDocumentText().substring(
selection.getBegin(), selection.getEnd()));
}
}
// Set feature values
List<AnnotationFeature> features = new ArrayList<AnnotationFeature>();
for (FeatureModel fm : featureModels) {
features.add(fm.feature);
// For string features with extensible tagsets, extend the tagset
if (CAS.TYPE_NAME_STRING.equals(fm.feature.getType())) {
String value = (String) fm.value;
if (
value != null &&
fm.feature.getTagset() != null &&
fm.feature.getTagset().isCreateTag() &&
!annotationService.existsTag(value, fm.feature.getTagset())
) {
Tag selectedTag = new Tag();
selectedTag.setName(value);
selectedTag.setTagSet(fm.feature.getTagset());
annotationService.createTag(selectedTag, aBModel.getUser());
}
}
adapter.updateFeature(jCas, fm.feature, aBModel.getSelection().getAnnotation().getId(),
fm.value);
}
// Update progress information
int sentenceNumber = getSentenceNumber(jCas, aBModel.getSelection().getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
// persist changes
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
if (bModel.getSelection().isRelationAnno()) {
aBModel.setRememberedArcLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedArcFeatures(featureModels);
}
else {
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedSpanFeatures(featureModels);
}
aBModel.getSelection().setAnnotate(true);
if (aBModel.getSelection().getAnnotation().isSet()) {
String bratLabelText = TypeUtil.getBratLabelText(adapter,
selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId()), features);
info(generateMessage(aBModel.getSelectedAnnotationLayer(), bratLabelText, false));
}
onAnnotate(aTarget, aBModel);
if (aBModel.isForwardAnnotation() && !aIsForwarded && featureModels.get(0).value != null) {
if (aBModel.getSelection().getEnd() >= aBModel.getFirstVisibleSentenceEnd()) {
autoScroll(jCas, aBModel, true);
}
onAutoForward(aTarget, aBModel);
} else if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
forwardAnnotationText.setModelObject(null);
onChange(aTarget, aBModel);
if (aBModel.isForwardAnnotation() && featureModels.get(0).value != null) {
aTarget.add(annotationFeatureForm);
}
}
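/**
 * Deletes the currently selected annotation. Attached relations, attach features and link
 * slots pointing to it are cleaned up first, then the annotation is removed and the CAS is
 * persisted.
 */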
public void actionDelete(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, CASRuntimeException,
BratAnnotationException
{
JCas jCas = getCas(aBModel);
AnnotationFS fs = selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId());
// TODO We assume here that the selected annotation layer corresponds to the type of the
// FS to be deleted. It would be more robust if we could get the layer from the FS itself.
AnnotationLayer layer = aBModel.getSelectedAnnotationLayer();
TypeAdapter adapter = getAdapter(annotationService, layer);
// == DELETE ATTACHED RELATIONS ==
// If the deleted FS is a span, we must delete all relations that
// point to it directly or indirectly via the attachFeature.
//
// NOTE: It is important that this happens before UNATTACH SPANS since the attach feature
// is no longer set after UNATTACH SPANS!
if (adapter instanceof SpanAdapter) {
for (AnnotationFS attachedFs : getAttachedRels(jCas, fs, layer)) {
jCas.getCas().removeFsFromIndexes(attachedFs);
info("The attached annotation for relation type [" + annotationService
.getLayer(attachedFs.getType().getName(), bModel.getProject()).getUiName()
+ "] is deleted");
}
}
// == DELETE ATTACHED SPANS ==
// This case is currently not implemented because WebAnno currently does not allow creating
// spans that attach to other spans. The only span type for which this is relevant
// is the Token type which cannot be deleted.
// == UNATTACH SPANS ==
// If the deleted FS is a span that is attached to another span, the
// attachFeature in the other span must be set to null. Typical example: POS is deleted, so
// the pos feature of Token must be set to null. This is a quick case, because we only need
// to look at span annotations that have the same offsets as the FS to be deleted.
if (adapter instanceof SpanAdapter && layer.getAttachType() != null) {
Type spanType = CasUtil.getType(jCas.getCas(), layer.getAttachType().getName());
Feature attachFeature = spanType.getFeatureByBaseName(layer.getAttachFeature()
.getName());
for (AnnotationFS attachedFs : selectAt(jCas.getCas(), spanType, fs.getBegin(),
fs.getEnd())) {
if (isSame(attachedFs.getFeatureValue(attachFeature), fs)) {
attachedFs.setFeatureValue(attachFeature, null);
LOG.debug("Unattached [" + attachFeature.getShortName() + "] on annotation ["
+ getAddr(attachedFs) + "]");
}
}
}
// == CLEAN UP LINK FEATURES ==
// If the deleted FS is a span that is the target of a link feature, we must unset that
// link and delete the slot if it is a multi-valued link. Here, we have to scan all
// annotations from layers that have link features that could point to the FS
// to be deleted: the link feature must be the type of the FS or it must be generic.
if (adapter instanceof SpanAdapter) {
for (AnnotationFeature linkFeature : annotationService.listAttachedLinkFeatures(layer)) {
Type linkType = CasUtil.getType(jCas.getCas(), linkFeature.getLayer().getName());
for (AnnotationFS linkFS : CasUtil.select(jCas.getCas(), linkType)) {
List<LinkWithRoleModel> links = getFeature(linkFS, linkFeature);
Iterator<LinkWithRoleModel> i = links.iterator();
boolean modified = false;
while (i.hasNext()) {
LinkWithRoleModel link = i.next();
if (link.targetAddr == getAddr(fs)) {
i.remove();
LOG.debug("Cleared slot [" + link.role + "] in feature ["
+ linkFeature.getName() + "] on annotation [" + getAddr(linkFS)
+ "]");
modified = true;
}
}
if (modified) {
setFeature(linkFS, linkFeature, links);
}
}
}
}
// If the deleted FS is a relation, we don't have to do anything. Nothing can point to a
// relation.
if (adapter instanceof ArcAdapter) {
// Do nothing ;)
}
// Actually delete annotation
adapter.delete(jCas, aBModel.getSelection().getAnnotation());
// Store CAS again
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
// Update progress information
int sentenceNumber = getSentenceNumber(jCas, aBModel.getSelection().getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
// Auto-scroll
if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.getSelection().setAnnotate(false);
info(generateMessage(aBModel.getSelectedAnnotationLayer(), null, true));
// A hack to remember the visual DropDown display value
aBModel.setRememberedSpanLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedSpanFeatures(featureModels);
aBModel.getSelection().clear();
// after a delete, the next user action is expected to be an annotation again
bModel.getSelection().setAnnotate(true);
aTarget.add(annotationFeatureForm);
aTarget.add(deleteButton);
aTarget.add(reverseButton);
onChange(aTarget, aBModel);
onDelete(aTarget, aBModel, fs);
}
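/**
 * Reverses the direction of the currently selected relation by re-creating the arc with
 * origin and target swapped. Only supported for arc layers; chains cannot be reversed.
 */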
private void actionReverse(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, BratAnnotationException
{
JCas jCas;
jCas = getCas(aBModel);
AnnotationFS idFs = selectByAddr(jCas, aBModel.getSelection().getAnnotation().getId());
jCas.removeFsFromIndexes(idFs);
AnnotationFS originFs = selectByAddr(jCas, aBModel.getSelection().getOrigin());
AnnotationFS targetFs = selectByAddr(jCas, aBModel.getSelection().getTarget());
TypeAdapter adapter = getAdapter(annotationService, aBModel.getSelectedAnnotationLayer());
if (adapter instanceof ArcAdapter) {
if(featureModels.size()==0){
//If no features, still create arc #256
AnnotationFS arc = ((ArcAdapter) adapter).add(targetFs, originFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), null, null);
aBModel.getSelection().setAnnotation(new VID(getAddr(arc)));
}
else{
for (FeatureModel fm : featureModels) {
AnnotationFS arc = ((ArcAdapter) adapter).add(targetFs, originFs, jCas,
bModel.getWindowBeginOffset(), bModel.getWindowEndOffset(), fm.feature,
fm.value);
aBModel.getSelection().setAnnotation(new VID(getAddr(arc)));
}
}
}
else {
error("chains cannot be reversed");
return;
}
// persist changes
repository.writeCas(aBModel.getMode(), aBModel.getDocument(), aBModel.getUser(), jCas);
int sentenceNumber = getSentenceNumber(jCas, originFs.getBegin());
aBModel.setFocusSentenceNumber(sentenceNumber);
aBModel.getDocument().setSentenceAccessed(sentenceNumber);
if (aBModel.getPreferences().isScrollPage()) {
autoScroll(jCas, aBModel, false);
}
info("The arc has been reversed");
aBModel.setRememberedArcLayer(aBModel.getSelectedAnnotationLayer());
aBModel.setRememberedArcFeatures(featureModels);
// in case the user re-reverses it
int temp = aBModel.getSelection().getOrigin();
aBModel.getSelection().setOrigin(aBModel.getSelection().getTarget());
aBModel.getSelection().setTarget(temp);
onChange(aTarget, aBModel);
}
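/**
 * Clears the current selection and resets the feature editor panel.
 */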
public void actionClear(AjaxRequestTarget aTarget, ActionContext aBModel)
throws IOException, UIMAException, ClassNotFoundException, BratAnnotationException
{
reset(aTarget);
aTarget.add(annotationFeatureForm);
onChange(aTarget, aBModel);
}
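/**
 * Returns the editable annotation CAS for the annotation, automation and correction modes,
 * and the curation CAS otherwise.
 */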
public JCas getCas(ActionContext aBModel)
throws UIMAException, IOException, ClassNotFoundException
{
if (aBModel.getMode().equals(Mode.ANNOTATION) || aBModel.getMode().equals(Mode.AUTOMATION)
|| aBModel.getMode().equals(Mode.CORRECTION)
|| aBModel.getMode().equals(Mode.CORRECTION_MERGE)) {
return repository.readAnnotationCas(aBModel.getDocument(), aBModel.getUser());
}
else {
return repository.readCurationCas(aBModel.getDocument());
}
}
/**
* Scroll the window of visible annotations.
*
* @param aForward
* instead of centering on the sentence containing the last edit, just scroll down
* one sentence. This is for forward-annotation mode.
*/
private void autoScroll(JCas jCas, ActionContext aBModel, boolean aForward)
{
if (aForward) {
// Fetch the first sentence on screen
Sentence sentence = selectByAddr(jCas, Sentence.class,
aBModel.getFirstVisibleSentenceAddress());
// Find the following one
int address = getNextSentenceAddress(jCas, sentence);
// Move to it
aBModel.setFirstVisibleSentence(selectByAddr(jCas, Sentence.class, address));
}
else {
// Fetch the first sentence on screen
Sentence sentence = selectByAddr(jCas, Sentence.class,
aBModel.getFirstVisibleSentenceAddress());
// Calculate the first sentence in the window in such a way that the annotation
// currently selected is in the center of the window
sentence = findWindowStartCenteringOnSelection(jCas, sentence,
aBModel.getSelection().getBegin(), aBModel.getProject(), aBModel.getDocument(),
aBModel.getPreferences().getWindowSize());
// Move to it
aBModel.setFirstVisibleSentence(sentence);
}
}
@SuppressWarnings("unchecked")
public void setSlot(AjaxRequestTarget aTarget, JCas aJCas, final ActionContext aBModel,
int aAnnotationId)
{
// Set an armed slot
if (!bModel.getSelection().isRelationAnno() && aBModel.isSlotArmed()) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) getFeatureModel(aBModel
.getArmedFeature()).value;
LinkWithRoleModel link = links.get(aBModel.getArmedSlot());
link.targetAddr = aAnnotationId;
link.label = selectByAddr(aJCas, aAnnotationId).getCoveredText();
aBModel.clearArmedSlot();
}
// Auto-commit if working on existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
try {
actionAnnotate(aTarget, bModel, aJCas, false);
}
catch (BratAnnotationException e) {
error(e.getMessage());
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
}
private void arcSelected(AjaxRequestTarget aTarget, JCas aJCas)
throws BratAnnotationException
{
// FIXME REC I think this whole section which meddles around with the selected annotation
// layer should be moved out of here to the place where we originally set the annotation
// layer...!
long layerId = TypeUtil.getLayerId(bModel.getSelection().getOriginType());
AnnotationLayer spanLayer = annotationService.getLayer(layerId);
if (
bModel.getPreferences().isRememberLayer() &&
bModel.getSelection().isAnnotate() &&
!spanLayer.equals(bModel.getDefaultAnnotationLayer()))
{
throw new BratAnnotationException("No relation annotation allowed on the "
+ "selected span layer");
}
// If we are creating a relation annotation, we have to set the current layer depending
// on the type of relation that is permitted between the source/target span. This is
// necessary because we have no separate UI control to set the relation annotation type.
// It is possible because currently only a single relation layer is allowed to attach to
// any given span layer.
if (bModel.getSelection().isAnnotate())
{
// If we drag an arc between POS annotations, then the relation must be a dependency
// relation.
// FIXME - Actually this case should be covered by the last case - the database lookup!
if (
spanLayer.isBuiltIn() &&
spanLayer.getName().equals(POS.class.getName()))
{
AnnotationLayer depLayer = annotationService.getLayer(Dependency.class.getName(),
bModel.getProject());
if (bModel.getAnnotationLayers().contains(depLayer)) {
bModel.setSelectedAnnotationLayer(depLayer);
}
else {
bModel.setSelectedAnnotationLayer(null);
}
}
// If we drag an arc in a chain layer, then the arc is of the same layer as the span
// Chain layers consist of arcs and spans
else if (spanLayer.getType().equals(WebAnnoConst.CHAIN_TYPE)) {
// one layer both for the span and arc annotation
bModel.setSelectedAnnotationLayer(spanLayer);
}
// Otherwise, look up the possible relation layer(s) in the database.
else {
for (AnnotationLayer layer : annotationService.listAnnotationLayer(bModel
.getProject())) {
if (layer.getAttachType() != null && layer.getAttachType().equals(spanLayer)) {
if (bModel.getAnnotationLayers().contains(layer)) {
bModel.setSelectedAnnotationLayer(layer);
}
else {
bModel.setSelectedAnnotationLayer(null);
}
break;
}
}
}
}
// Populate feature value from existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
AnnotationFS annoFs = selectByAddr(aJCas, bModel.getSelection().getAnnotation()
.getId());
// Try obtaining the layer from the feature structure
AnnotationLayer layer;
try {
layer = TypeUtil.getLayer(annotationService, bModel.getProject(), annoFs);
}
catch (NoResultException e) {
clearFeatures(aTarget);
throw new IllegalStateException("Unknown layer [" + annoFs.getType().getName() + "]", e);
}
populateFeatures(layer, annoFs, null);
}
// Avoid creation of arcs on locked layers
else if (bModel.getSelectedAnnotationLayer() != null
&& bModel.getSelectedAnnotationLayer().isReadonly()) {
bModel.setSelectedAnnotationLayer(new AnnotationLayer());
}
else {
populateFeatures(bModel.getSelectedAnnotationLayer(), null,
bModel.getRememberedArcFeatures());
}
bModel.setDefaultAnnotationLayer(spanLayer);
}
private void spanSelected(AjaxRequestTarget aTarget, JCas aJCas)
{
// Selecting an existing span annotation
if (bModel.getSelection().getAnnotation().isSet()) {
AnnotationFS annoFs = selectByAddr(aJCas, bModel.getSelection().getAnnotation()
.getId());
// Try obtaining the layer from the feature structure
AnnotationLayer layer;
try {
layer = TypeUtil.getLayer(annotationService, bModel.getProject(), annoFs);
}
catch (NoResultException e) {
clearFeatures(aTarget);
throw new IllegalStateException("Unknown layer [" + annoFs.getType().getName() + "]", e);
}
// If remember layer is off, then the current layer follows the selected annotations
if (!bModel.getPreferences().isRememberLayer()) {
bModel.setSelectedAnnotationLayer(layer);
}
// populate feature value
populateFeatures(layer, annoFs, null);
}
else {
populateFeatures(bModel.getSelectedAnnotationLayer(), null,
bModel.getRememberedSpanFeatures());
}
}
protected void onChange(AjaxRequestTarget aTarget, ActionContext aBModel)
{
// Overridden in BratAnnotator
}
protected void onAutoForward(AjaxRequestTarget aTarget, ActionContext aBModel)
{
// Overridden in BratAnnotator
}
protected void onAnnotate(AjaxRequestTarget aTarget, ActionContext aModel)
{
// Overridden in AutomationPage
}
protected void onDelete(AjaxRequestTarget aTarget, ActionContext aModel, AnnotationFS aFs)
{
// Overridden in AutomationPage
}
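/**
 * Refreshes the layer dropdown. If no layer is selected yet, the remembered layer or the
 * first available layer becomes the selection.
 */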
public void refreshAnnotationLayers(ActionContext aBModel)
{
updateLayersDropdown(aBModel);
if (annotationLayers.size() == 0) {
aBModel.setSelectedAnnotationLayer(new AnnotationLayer());
}
else if (aBModel.getSelectedAnnotationLayer() == null) {
if (aBModel.getRememberedSpanLayer() == null) {
aBModel.setSelectedAnnotationLayer(annotationLayers.get(0));
}
else {
aBModel.setSelectedAnnotationLayer(aBModel.getRememberedSpanLayer());
}
}
clearFeatures(null);
updateRememberLayer();
}
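/**
 * Rebuilds the list of selectable layers, skipping disabled, read-only and Token layers.
 * Chain layers are only offered if they have an enabled coreference type feature.
 */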
private void updateLayersDropdown(ActionContext aBModel)
{
annotationLayers.clear();
AnnotationLayer l = null;
for (AnnotationLayer layer : aBModel.getAnnotationLayers()) {
if (!layer.isEnabled() || layer.isReadonly()
|| layer.getName().equals(Token.class.getName())) {
continue;
}
if (layer.getType().equals(WebAnnoConst.SPAN_TYPE)) {
annotationLayers.add(layer);
l = layer;
}
// manage chain type
else if (layer.getType().equals(WebAnnoConst.CHAIN_TYPE)) {
for (AnnotationFeature feature : annotationService.listAnnotationFeature(layer)) {
if (!feature.isEnabled()) {
continue;
}
if (feature.getName().equals(WebAnnoConst.COREFERENCE_TYPE_FEATURE)) {
annotationLayers.add(layer);
}
}
}
// chain
}
if (bModel.getDefaultAnnotationLayer() != null) {
bModel.setSelectedAnnotationLayer(bModel.getDefaultAnnotationLayer());
}
else if (l != null) {
bModel.setSelectedAnnotationLayer(l);
}
}
public class FeatureEditorPanelContent
extends RefreshingView<FeatureModel>
{
private static final long serialVersionUID = -8359786805333207043L;
public FeatureEditorPanelContent(String aId)
{
super(aId);
setOutputMarkupId(true);
}
@SuppressWarnings("rawtypes")
@Override
protected void populateItem(final Item<FeatureModel> item)
{
// Feature editors that allow multiple values may want to update themselves,
// e.g. to add another slot.
item.setOutputMarkupId(true);
final FeatureModel fm = item.getModelObject();
final FeatureEditor frag;
switch (fm.feature.getMultiValueMode()) {
case NONE: {
switch (fm.feature.getType()) {
case CAS.TYPE_NAME_INTEGER: {
frag = new NumberFeatureEditor("editor", "numberFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_FLOAT: {
frag = new NumberFeatureEditor("editor", "numberFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_BOOLEAN: {
frag = new BooleanFeatureEditor("editor", "booleanFeatureEditor", item, fm);
break;
}
case CAS.TYPE_NAME_STRING: {
frag = new TextFeatureEditor("editor", "textFeatureEditor", item, fm);
break;
}
default:
throw new IllegalArgumentException("Unsupported type [" + fm.feature.getType()
+ "] on feature [" + fm.feature.getName() + "]");
}
break;
}
case ARRAY: {
switch (fm.feature.getLinkMode()) {
case WITH_ROLE: {
// If it is none of the primitive types, it must be a link feature
frag = new LinkFeatureEditor("editor", "linkFeatureEditor", item, fm);
break;
}
default:
throw new IllegalArgumentException("Unsupported link mode ["
+ fm.feature.getLinkMode() + "] on feature [" + fm.feature.getName()
+ "]");
}
break;
}
default:
throw new IllegalArgumentException("Unsupported multi-value mode ["
+ fm.feature.getMultiValueMode() + "] on feature [" + fm.feature.getName()
+ "]");
}
// We need to enable the markup ID here because we use it during the AJAX behavior that
// automatically saves feature editors on change/blur. Check addAnnotateActionBehavior.
frag.setOutputMarkupId(true);
item.add(frag);
if (!fm.feature.getLayer().isReadonly()) {
// When editing an existing annotation, the annotation is updated automatically whenever a
// feature editor component loses focus; this applies to every edited component.
// LinkFeatureEditors must be excluded because the auto-update will break the
// ability to add slots. Adding a slot is NOT an annotation action.
// TODO annotate every time except when position is at (0,0)
if (bModel.getSelection().getAnnotation().isSet()
&& !(frag instanceof LinkFeatureEditor)) {
if (frag.isDropOrchoice()) {
addAnnotateActionBehavior(frag, "change");
}
else {
addAnnotateActionBehavior(frag, "blur");
}
}
else if (!(frag instanceof LinkFeatureEditor)) {
if (frag.isDropOrchoice()) {
storeFeatureValue(frag, "change");
}
else {
storeFeatureValue(frag, "blur");
}
}
// Put focus on hidden input field if we are in forward-mode
if (bModel.isForwardAnnotation()) {
forwardAnnotationText.add(new DefaultFocusBehavior2());
}
// Put focus on first component if we select an existing annotation or create a
// new one
else if (
item.getIndex() == 0 &&
SpanAnnotationResponse.is(bModel.getUserAction())
) {
frag.getFocusComponent().add(new DefaultFocusBehavior());
}
// Restore/preserve focus when tabbing through the feature editors
else if (bModel.getUserAction() == null) {
AjaxRequestTarget target = RequestCycle.get().find(AjaxRequestTarget.class);
if (target != null && frag.getFocusComponent().getMarkupId()
.equals(target.getLastFocusedElementId())) {
target.focusComponent(frag.getFocusComponent());
}
}
// Add tooltip on label
StringBuilder tooltipTitle = new StringBuilder();
tooltipTitle.append(fm.feature.getUiName());
if (fm.feature.getTagset() != null) {
tooltipTitle.append(" (");
tooltipTitle.append(fm.feature.getTagset().getName());
tooltipTitle.append(')');
}
Component labelComponent = frag.getLabelComponent();
labelComponent.add(new AttributeAppender("style", "cursor: help", ";"));
labelComponent.add(new DescriptionTooltipBehavior(tooltipTitle.toString(),
fm.feature.getDescription()));
}
else {
frag.getFocusComponent().setEnabled(false);
}
}
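/**
 * Registers an AJAX behavior which pushes the edited value into the feature model and
 * re-renders the feature form without creating or updating an annotation.
 */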
private void storeFeatureValue(final FeatureEditor aFrag, String aEvent)
{
aFrag.getFocusComponent().add(new AjaxFormComponentUpdatingBehavior(aEvent)
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
aTarget.add(annotationFeatureForm);
}
});
}
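/**
 * Registers an AJAX behavior which saves the feature value and triggers the annotate action
 * when the editor changes or loses focus. The update is throttled to avoid racing with the
 * loading of a newly selected annotation.
 */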
private void addAnnotateActionBehavior(final FeatureEditor aFrag, String aEvent)
{
aFrag.getFocusComponent().add(new AjaxFormComponentUpdatingBehavior(aEvent)
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void updateAjaxAttributes(AjaxRequestAttributes aAttributes)
{
super.updateAjaxAttributes(aAttributes);
// When focus is on a feature editor and the user selects a new annotation,
// there is a race condition between the saving the value of the feature editor
// and the loading of the new annotation. Delay the feature editor save to give
// preference to loading the new annotation.
aAttributes.setThrottlingSettings(
new ThrottlingSettings(getMarkupId(), Duration.milliseconds(250), true));
aAttributes.getAjaxCallListeners().add(new AjaxCallListener()
{
private static final long serialVersionUID = 1L;
@Override
public CharSequence getPrecondition(Component aComponent)
{
// If the panel refreshes because the user selects
// a new annotation, the annotation editor panel is updated for the
// new annotation first (before saving values) because of the delay
// set above. When the delay is over, we can no longer save the value
// because the old component is no longer there. We use the markup id
// of the editor fragments to check if the old component is still there
// (i.e. if the user has just tabbed to a new field) or if the old
// component is gone (i.e. the user selected/created another annotation).
// If the old component is no longer there, we abort the delayed save
// action.
return "return $('#"+aFrag.getMarkupId()+"').length > 0;";
}
});
}
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
try {
if (bModel.getConstraints() != null) {
// Make sure we update the feature editor panel because due to
// constraints the contents may have to be re-rendered
aTarget.add(annotationFeatureForm);
}
actionAnnotate(aTarget, bModel, false);
}
catch (BratAnnotationException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
});
}
@Override
protected Iterator<IModel<FeatureModel>> getItemModels()
{
ModelIteratorAdapter<FeatureModel> i = new ModelIteratorAdapter<FeatureModel>(
featureModels)
{
@Override
protected IModel<FeatureModel> model(FeatureModel aObject)
{
return Model.of(aObject);
}
};
return i;
}
}
public static abstract class FeatureEditor
extends Fragment
{
private static final long serialVersionUID = -7275181609671919722L;
protected static final String ID_PREFIX = "featureEditorHead";
public FeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aMarkupProvider,
IModel<?> aModel)
{
super(aId, aMarkupId, aMarkupProvider, aModel);
}
public Component getLabelComponent()
{
return get("feature");
}
abstract public Component getFocusComponent();
abstract public boolean isDropOrchoice();
}
public static class NumberFeatureEditor<T extends Number>
extends FeatureEditor
{
private static final long serialVersionUID = -2426303638953208057L;
@SuppressWarnings("rawtypes")
private final NumberTextField field;
public NumberFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
add(new Label("feature", aModel.feature.getUiName()));
switch (aModel.feature.getType()) {
case CAS.TYPE_NAME_INTEGER: {
field = new NumberTextField<Integer>("value", Integer.class);
break;
}
case CAS.TYPE_NAME_FLOAT: {
field = new NumberTextField<Float>("value", Float.class);
break;
}
default:
throw new IllegalArgumentException("Type [" + aModel.feature.getType()
+ "] cannot be rendered as a numeric input field");
}
// Ensure that markup IDs of feature editor focus components remain constant across
// refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
}
@SuppressWarnings("rawtypes")
@Override
public NumberTextField getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return false;
}
};
public static class BooleanFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 5104979547245171152L;
private final CheckBox field;
public BooleanFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
add(new Label("feature", aModel.feature.getUiName()));
field = new CheckBox("value");
// Ensure that markup IDs of feature editor focus components remain constant across
// refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
}
@Override
public Component getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return true;
}
};
public class TextFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 7763348613632105600L;
@SuppressWarnings("rawtypes")
private final AbstractTextComponent field;
private boolean isDrop;
//For showing the status of Constraints rules kicking in.
private RulesIndicator indicator = new RulesIndicator();
private boolean hideUnconstraintFeature;
/**
* Hides the feature if "Hide un-constraint feature" is enabled, constraint rules are applied,
* and the feature does not match any constraint rule.
*/
@Override
public boolean isVisible() {
if (hideUnconstraintFeature) {
// if enabled and constraint rule execution returns anything other than green
if (indicator.isAffected() && !indicator.getStatusColor().equals("green")) {
return false;
}
}
return true;
}
public TextFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
//Checks whether hide un-constraint feature is enabled or not
hideUnconstraintFeature = aModel.feature.isHideUnconstraintFeature();
add(new Label("feature", aModel.feature.getUiName()));
indicator.reset(); //reset the indicator
if (aModel.feature.getTagset() != null) {
List<Tag> tagset = null;
ActionContext model = bModel;
// check whether constraint rules exist for this project and an annotation is selected
if (model.getConstraints() != null && model.getSelection().getAnnotation().isSet()) {
// indicator.setRulesExist(true);
tagset = populateTagsBasedOnRules(model, aModel);
}
else {
// indicator.setRulesExist(false);
// Earlier behavior,
tagset = annotationService.listTags(aModel.feature.getTagset());
}
field = new StyledComboBox<Tag>("value", tagset) {
private static final long serialVersionUID = -1735694425658462932L;
@Override
protected void onInitialize()
{
// Ensure proper order of the initializing JS header items: first combo box
// behavior (in super.onInitialize()), then tooltip.
Options options = new Options(DescriptionTooltipBehavior.makeTooltipOptions());
options.set("content", functionForTooltip);
add(new TooltipBehavior("#"+field.getMarkupId()+"_listbox *[title]", options) {
private static final long serialVersionUID = 1854141593969780149L;
@Override
protected String $()
{
// REC: It takes a moment for the KendoDatasource to load the data and
// for the Combobox to render the hidden dropdown. I did not find
// a way to hook into this process and to get notified when the
// data is available in the dropdown, so trying to handle this
// with a slight delay, hoping that everything is set up after 1 second.
return "try {setTimeout(function () { " + super.$() + " }, 1000); } catch (err) {}; ";
}
});
super.onInitialize();
}
};
isDrop = true;
}
else {
field = new TextField<String>("value");
}
// Ensure that markup IDs of feature editor focus components remain constant across
// refreshes of the feature editor panel. This is required to restore the focus.
field.setOutputMarkupId(true);
field.setMarkupId(ID_PREFIX + aModel.feature.getId());
add(field);
// Shows whether constraints are triggered or not and also shows the state of constraint use.
Component constraintsInUseIndicator = new WebMarkupContainer("textIndicator"){
private static final long serialVersionUID = 4346767114287766710L;
@Override
public boolean isVisible()
{
return indicator.isAffected();
}
}.add(new AttributeAppender("class", new Model<String>(){
private static final long serialVersionUID = -7683195283137223296L;
@Override
public String getObject()
{
//adds symbol to indicator
return indicator.getStatusSymbol();
}
}))
.add(new AttributeAppender("style", new Model<String>(){
private static final long serialVersionUID = -5255873539738210137L;
@Override
public String getObject()
{
//adds color to indicator
return "; color: " + indicator.getStatusColor();
}
}));
add(constraintsInUseIndicator);
}
/**
* Adds and sorts tags based on Constraints rules
*/
private List<Tag> populateTagsBasedOnRules(ActionContext model, FeatureModel aModel)
{
// Add values from rules
String restrictionFeaturePath;
switch (aModel.feature.getLinkMode()) {
case WITH_ROLE:
restrictionFeaturePath = aModel.feature.getName() + "."
+ aModel.feature.getLinkTypeRoleFeatureName();
break;
case NONE:
restrictionFeaturePath = aModel.feature.getName();
break;
default:
throw new IllegalArgumentException("Unsupported link mode ["
+ aModel.feature.getLinkMode() + "] on feature ["
+ aModel.feature.getName() + "]");
}
List<Tag> valuesFromTagset = annotationService.listTags(aModel.feature.getTagset());
try {
JCas jCas = getCas(model);
FeatureStructure featureStructure = selectByAddr(jCas, model.getSelection()
.getAnnotation().getId());
Evaluator evaluator = new ValuesGenerator();
//Only show indicator if this feature can be affected by Constraint rules!
indicator.setAffected(evaluator.isThisAffectedByConstraintRules(featureStructure,
restrictionFeaturePath, model.getConstraints()));
List<PossibleValue> possibleValues;
try {
possibleValues = evaluator.generatePossibleValues(
featureStructure, restrictionFeaturePath, model.getConstraints());
LOG.debug("Possible values for [" + featureStructure.getType().getName() + "] ["
+ restrictionFeaturePath + "]: " + possibleValues);
}
catch (Exception e) {
error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e));
LOG.error("Unable to evaluate constraints: " + e.getMessage(), e);
possibleValues = new ArrayList<>();
}
// only adds tags which are suggested by rules and exist in tagset.
List<Tag> tagset = compareSortAndAdd(possibleValues, valuesFromTagset, indicator);
// add remaining tags
addRemainingTags(tagset, valuesFromTagset);
return tagset;
}
catch (IOException | ClassNotFoundException | UIMAException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
return valuesFromTagset;
}
@Override
public Component getFocusComponent()
{
return field;
}
@Override
public boolean isDropOrchoice()
{
return isDrop;
}
};
public class LinkFeatureEditor
extends FeatureEditor
{
private static final long serialVersionUID = 7469241620229001983L;
private WebMarkupContainer content;
//For showing the status of Constraints rules kicking in.
private RulesIndicator indicator = new RulesIndicator();
@SuppressWarnings("rawtypes")
private final AbstractTextComponent newRole;
private boolean isDrop;
private boolean hideUnconstraintFeature;
/**
* Hides the feature if "Hide un-constraint feature" is enabled, constraint rules are applied,
* and the feature does not match any constraint rule.
*/
@Override
public boolean isVisible() {
if (hideUnconstraintFeature) {
// if enabled and constraint rule execution returns anything other than green
if (indicator.isAffected() && !indicator.getStatusColor().equals("green")) {
return false;
}
}
return true;
}
@SuppressWarnings("unchecked")
public LinkFeatureEditor(String aId, String aMarkupId, Item<FeatureModel> aItem,
final FeatureModel aModel)
{
super(aId, aMarkupId, aItem, new CompoundPropertyModel<FeatureModel>(aModel));
//Checks whether hide un-constraint feature is enabled or not
hideUnconstraintFeature = aModel.feature.isHideUnconstraintFeature();
add(new Label("feature", aModel.feature.getUiName()));
// Most of the content is inside this container such that we can refresh it independently
// from the rest of the form
content = new WebMarkupContainer("content");
content.setOutputMarkupId(true);
add(content);
content.add(new RefreshingView<LinkWithRoleModel>("slots",
Model.of((List<LinkWithRoleModel>) aModel.value))
{
private static final long serialVersionUID = 5475284956525780698L;
@Override
protected Iterator<IModel<LinkWithRoleModel>> getItemModels()
{
ModelIteratorAdapter<LinkWithRoleModel> i = new ModelIteratorAdapter<LinkWithRoleModel>(
(List<LinkWithRoleModel>) LinkFeatureEditor.this.getModelObject().value)
{
@Override
protected IModel<LinkWithRoleModel> model(LinkWithRoleModel aObject)
{
return Model.of(aObject);
}
};
return i;
}
@Override
protected void populateItem(final Item<LinkWithRoleModel> aItem)
{
aItem.setModel(new CompoundPropertyModel<LinkWithRoleModel>(aItem
.getModelObject()));
Label role = new Label("role");
aItem.add(role);
final Label label;
if (aItem.getModelObject().targetAddr == -1
&& bModel.isArmedSlot(aModel.feature, aItem.getIndex())) {
label = new Label("label", "<Select to fill>");
}
else {
label = new Label("label");
}
label.add(new AjaxEventBehavior("click")
{
private static final long serialVersionUID = 7633309278417475424L;
@Override
protected void onEvent(AjaxRequestTarget aTarget)
{
if (bModel.isArmedSlot(aModel.feature, aItem.getIndex())) {
bModel.clearArmedSlot();
aTarget.add(content);
}
else {
bModel.setArmedSlot(aModel.feature, aItem.getIndex());
// Need to re-render the whole form because a slot in another
// link editor might get unarmed
aTarget.add(annotationFeatureForm);
}
}
});
label.add(new AttributeAppender("style", new Model<String>()
{
private static final long serialVersionUID = 1L;
@Override
public String getObject()
{
ActionContext model = bModel;
if (model.isArmedSlot(aModel.feature, aItem.getIndex())) {
return "; background: orange";
}
else {
return "";
}
}
}));
aItem.add(label);
}
});
if (aModel.feature.getTagset() != null) {
List<Tag> tagset = null;
//reset the indicator
indicator.reset();
if (bModel.getConstraints() != null && bModel.getSelection().getAnnotation().isSet()) {
// indicator.setRulesExist(true); //Constraint rules exist!
tagset = addTagsBasedOnRules(bModel, aModel);
}
else {
// indicator.setRulesExist(false); //No constraint rules.
// add tagsets only, earlier behavior
tagset = annotationService.listTags(aModel.feature.getTagset());
}
newRole = new StyledComboBox<Tag>("newRole", Model.of(""), tagset) {
private static final long serialVersionUID = 1L;
@Override
protected void onInitialize()
{
super.onInitialize();
// Ensure proper order of the initializing JS header items: first combo box
// behavior (in super.onInitialize()), then tooltip.
Options options = new Options(DescriptionTooltipBehavior.makeTooltipOptions());
options.set("content", functionForTooltip);
add(new TooltipBehavior("#"+newRole.getMarkupId()+"_listbox *[title]", options) {
private static final long serialVersionUID = -7207021885475073279L;
@Override
protected String $()
{
// REC: It takes a moment for the KendoDatasource to load the data and
// for the Combobox to render the hidden dropdown. I did not find
// a way to hook into this process and to get notified when the
// data is available in the dropdown, so trying to handle this
// with a slight delay, hoping that everything is set up after 1 second.
return "try {setTimeout(function () { " + super.$() + " }, 1000); } catch (err) {}; ";
}
});
}
@Override
protected void onConfigure()
{
super.onConfigure();
if (bModel.isSlotArmed() && aModel.feature.equals(bModel.getArmedFeature())) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
setModelObject(links.get(bModel.getArmedSlot()).role);
}
else {
setModelObject("");
}
}
};
// Ensure that markup IDs of feature editor focus components remain constant across
// refreshes of the feature editor panel. This is required to restore the focus.
newRole.setOutputMarkupId(true);
newRole.setMarkupId(ID_PREFIX + aModel.feature.getId());
content.add(newRole);
isDrop = true;
}
else {
content.add(newRole = new TextField<String>("newRole", Model.of("")) {
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
super.onConfigure();
if (bModel.isSlotArmed() && aModel.feature.equals(bModel.getArmedFeature())) {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
setModelObject(links.get(bModel.getArmedSlot()).role);
}
else {
setModelObject("");
}
}
});
}
// Shows whether constraints are triggered or not and also shows the state of constraint use.
Component constraintsInUseIndicator = new WebMarkupContainer("linkIndicator"){
private static final long serialVersionUID = 4346767114287766710L;
@Override
public boolean isVisible()
{
return indicator.isAffected();
}
}.add(new AttributeAppender("class", new Model<String>(){
private static final long serialVersionUID = -7683195283137223296L;
@Override
public String getObject()
{
//adds symbol to indicator
return indicator.getStatusSymbol();
}
}))
.add(new AttributeAppender("style", new Model<String>(){
private static final long serialVersionUID = -5255873539738210137L;
@Override
public String getObject()
{
//adds color to indicator
return "; color: " + indicator.getStatusColor();
}
}));
add(constraintsInUseIndicator);
// Add a new empty slot with the specified role
content.add(new AjaxButton("add")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure(){
ActionContext model = bModel;
setVisible(!(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature())));
// setEnabled(!(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature())));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
if (StringUtils.isBlank((String) newRole.getModelObject())) {
error("Must set slot label before adding!");
aTarget.addChildren(getPage(), FeedbackPanel.class);
}
else {
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
LinkWithRoleModel m = new LinkWithRoleModel();
m.role = (String) newRole.getModelObject();
links.add(m);
bModel.setArmedSlot(LinkFeatureEditor.this.getModelObject().feature,
links.size() - 1);
// Need to re-render the whole form because a slot in another
// link editor might get unarmed
aTarget.add(annotationFeatureForm);
}
}
});
// Allows user to update slot
content.add(new AjaxButton("set"){
private static final long serialVersionUID = 7923695373085126646L;
@Override
protected void onConfigure(){
ActionContext model = bModel;
setVisible(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature()));
// setEnabled(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature()));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
ActionContext model = bModel;
//Update the slot
LinkWithRoleModel m = links.get(model.getArmedSlot());
m.role = (String) newRole.getModelObject();
// int index = model.getArmedSlot(); //retain index
// links.remove(model.getArmedSlot());
// model.clearArmedSlot();
// links.add(m);
links.set(model.getArmedSlot(), m); //avoid reordering
aTarget.add(content);
try {
actionAnnotate(aTarget, bModel, false);
}
catch(BratAnnotationException e){
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(e.getMessage());
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
});
// Remove the currently armed slot
content.add(new AjaxButton("del")
{
private static final long serialVersionUID = 1L;
@Override
protected void onConfigure()
{
ActionContext model = bModel;
setVisible(model.isSlotArmed()
&& aModel.feature.equals(model.getArmedFeature()));
// setEnabled(model.isSlotArmed()
// && aModel.feature.equals(model.getArmedFeature()));
}
@Override
protected void onSubmit(AjaxRequestTarget aTarget, Form<?> aForm)
{
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
ActionContext model = bModel;
links.remove(model.getArmedSlot());
model.clearArmedSlot();
aTarget.add(content);
// Auto-commit if working on existing annotation
if (bModel.getSelection().getAnnotation().isSet()) {
try {
actionAnnotate(aTarget, bModel, false);
}
catch (BratAnnotationException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
catch (Exception e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
}
}
});
}
/**
* Adds tags based on constraint rules and auto-adds tags which are marked as important.
*
* @return List containing tags which exist in tagset and also suggested by rules, followed
* by the remaining tags in tagset.
*/
private List<Tag> addTagsBasedOnRules(ActionContext model, final FeatureModel aModel)
{
String restrictionFeaturePath = aModel.feature.getName() + "."
+ aModel.feature.getLinkTypeRoleFeatureName();
List<Tag> valuesFromTagset = annotationService.listTags(aModel.feature.getTagset());
try {
JCas jCas = getCas(model);
FeatureStructure featureStructure = selectByAddr(jCas, model.getSelection()
.getAnnotation().getId());
Evaluator evaluator = new ValuesGenerator();
//Only show indicator if this feature can be affected by Constraint rules!
indicator.setAffected(evaluator.isThisAffectedByConstraintRules(featureStructure,
restrictionFeaturePath, model.getConstraints()));
List<PossibleValue> possibleValues;
try {
possibleValues = evaluator.generatePossibleValues(
featureStructure, restrictionFeaturePath, model.getConstraints());
LOG.debug("Possible values for [" + featureStructure.getType().getName() + "] ["
+ restrictionFeaturePath + "]: " + possibleValues);
}
catch (Exception e) {
error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e));
LOG.error("Unable to evaluate constraints: " + ExceptionUtils.getRootCauseMessage(e), e);
possibleValues = new ArrayList<>();
}
// Only adds tags which are suggested by rules and exist in tagset.
List<Tag> tagset = compareSortAndAdd(possibleValues, valuesFromTagset, indicator);
removeAutomaticallyAddedUnusedEntries();
// Create entries for important tags.
autoAddImportantTags(tagset, possibleValues);
// Add remaining tags.
addRemainingTags(tagset, valuesFromTagset);
return tagset;
}
catch (ClassNotFoundException | UIMAException | IOException e) {
error(ExceptionUtils.getRootCauseMessage(e));
LOG.error(ExceptionUtils.getRootCauseMessage(e), e);
}
return valuesFromTagset;
}
private void removeAutomaticallyAddedUnusedEntries()
{
// Remove unused (but auto-added) tags.
@SuppressWarnings("unchecked")
List<LinkWithRoleModel> list = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
Iterator<LinkWithRoleModel> existingLinks = list.iterator();
while (existingLinks.hasNext()) {
LinkWithRoleModel link = existingLinks.next();
if (link.autoCreated && link.targetAddr == -1) {
// remove it
existingLinks.remove();
}
}
}
private void autoAddImportantTags(List<Tag> aTagset, List<PossibleValue> possibleValues)
{
// Construct a quick index for tags
Set<String> tagset = new HashSet<String>();
for (Tag t : aTagset) {
tagset.add(t.getName());
}
// Get links list and build role index
@SuppressWarnings("unchecked")
List<LinkWithRoleModel> links = (List<LinkWithRoleModel>) LinkFeatureEditor.this
.getModelObject().value;
Set<String> roles = new HashSet<String>();
for (LinkWithRoleModel l : links) {
roles.add(l.role);
}
// Loop over values to see which of the tags are important and add them.
for (PossibleValue value : possibleValues) {
if (!value.isImportant() || !tagset.contains(value.getValue())) {
continue;
}
// Check if there is already a slot with the given name
if (roles.contains(value.getValue())) {
continue;
}
// Add empty slot in UI with that name.
LinkWithRoleModel m = new LinkWithRoleModel();
m.role = value.getValue();
// Mark it so that it can be ignored later.
m.autoCreated = true;
links.add(m);
// NOT arming the slot here!
}
}
public void setModelObject(FeatureModel aModel)
{
setDefaultModelObject(aModel);
}
public FeatureModel getModelObject()
{
return (FeatureModel) getDefaultModelObject();
}
@Override
public Component getFocusComponent()
{
return newRole;
}
@Override
public boolean isDropOrchoice()
{
return isDrop;
}
};
public void clearFeatures(AjaxRequestTarget aTarget)
{
featureModels = new ArrayList<>();
if (aTarget != null) {
aTarget.add(annotationFeatureForm);
}
}
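/**
 * Rebuilds the feature models for the given layer. Values are taken from the given feature
 * structure if present, otherwise from the remembered values.
 */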
private void populateFeatures(AnnotationLayer aLayer, FeatureStructure aFS,
Map<AnnotationFeature, Serializable> aRemembered)
{
clearFeatures(null);
// Populate from feature structure
for (AnnotationFeature feature : annotationService.listAnnotationFeature(aLayer)) {
if (!feature.isEnabled()) {
continue;
}
Serializable value = null;
if (aFS != null) {
value = (Serializable) BratAjaxCasUtil.getFeature(aFS, feature);
}
else if (aRemembered != null) {
value = aRemembered.get(feature);
}
if (WebAnnoConst.CHAIN_TYPE.equals(feature.getLayer().getType())) {
if (bModel.getSelection().isRelationAnno()) {
if (feature.getLayer().isLinkedListBehavior()
&& WebAnnoConst.COREFERENCE_RELATION_FEATURE.equals(feature
.getName())) {
featureModels.add(new FeatureModel(feature, value));
}
}
else {
if (WebAnnoConst.COREFERENCE_TYPE_FEATURE.equals(feature.getName())) {
featureModels.add(new FeatureModel(feature, value));
}
}
}
else {
featureModels.add(new FeatureModel(feature, value));
}
}
}
public void addRemainingTags(List<Tag> tagset, List<Tag> valuesFromTagset)
{
// add the remaining tags from the tagset.
for (Tag remainingTag : valuesFromTagset) {
if (!tagset.contains(remainingTag)) {
tagset.add(remainingTag);
}
}
}
/*
* Compares the existing tagset with the possible values resulting from rule evaluation. Adds
* only those tags which exist in the tagset and are suggested by the rules. The remaining
* values from the tagset are added afterwards.
*/
private static List<Tag> compareSortAndAdd(List<PossibleValue> possibleValues,
List<Tag> valuesFromTagset, RulesIndicator rulesIndicator)
{
// if there are no possible values, the conditions were not satisfied
if(possibleValues.isEmpty())
{
rulesIndicator.didntMatchAnyRule();
}
List<Tag> returnList = new ArrayList<Tag>();
// Sorting based on important flag
// possibleValues.sort(null);
// Comparing to check which values suggested by rules exists in existing
// tagset and adding them first in list.
for (PossibleValue value : possibleValues) {
for (Tag tag : valuesFromTagset) {
if (value.getValue().equalsIgnoreCase(tag.getName())) {
//Matching values found in tagset and shown in dropdown
rulesIndicator.rulesApplied();
// HACK BEGIN
tag.setReordered(true);
// HACK END
//Avoid duplicate entries
if(!returnList.contains(tag)){
returnList.add(tag);
}
}
}
}
//If no matching tags found
if(returnList.isEmpty()){
rulesIndicator.didntMatchAnyTag();
}
return returnList;
}
public class LayerSelector
extends DropDownChoice<AnnotationLayer>
{
private static final long serialVersionUID = 2233133653137312264L;
public LayerSelector(String aId, List<? extends AnnotationLayer> aChoices)
{
super(aId, aChoices);
setOutputMarkupId(true);
setChoiceRenderer(new ChoiceRenderer<AnnotationLayer>("uiName"));
add(new AjaxFormComponentUpdatingBehavior("change")
{
private static final long serialVersionUID = 5179816588460867471L;
@Override
protected void onUpdate(AjaxRequestTarget aTarget)
{
// If "remember layer" is set, the we really just update the selected layer...
// we do not touch the selected annotation not the annotation detail panel
if (bModel.getPreferences().isRememberLayer()) {
bModel.setSelectedAnnotationLayer(getModelObject());
}
// If "remember layer" is not set, then changing the layer means that we want
// to change the type of the currently selected annotation
else if (
!bModel.getSelectedAnnotationLayer().equals(getModelObject()) &&
bModel.getSelection().getAnnotation().isSet())
{
if (bModel.getSelection().isRelationAnno()) {
try {
actionClear(aTarget, bModel);
}
catch (UIMAException | ClassNotFoundException | IOException
| BratAnnotationException e) {
error(e.getMessage());
}
}
else {
deleteModal.setContent(new DeleteOrReplaceAnnotationModalPanel(
deleteModal.getContentId(), bModel, deleteModal,
AnnotationDetailEditorPanel.this, getModelObject(), true));
deleteModal
.setWindowClosedCallback(new ModalWindow.WindowClosedCallback()
{
private static final long serialVersionUID = 4364820331676014559L;
@Override
public void onClose(AjaxRequestTarget target)
{
target.add(annotationFeatureForm);
}
});
deleteModal.show(aTarget);
}
}
// If no annotation is selected, then prime the annotation detail panel for the
// new type
else {
bModel.setSelectedAnnotationLayer(getModelObject());
selectedAnnotationLayer.setDefaultModelObject(getModelObject().getUiName());
aTarget.add(selectedAnnotationLayer);
clearFeatures(aTarget);
}
}
});
}
}
private FeatureModel getFeatureModel(AnnotationFeature aFeature)
{
for (FeatureModel f : featureModels) {
if (f.feature.getId() == aFeature.getId()) {
return f;
}
}
return null;
}
/**
* Represents a link with a role in the UI.
*/
public static class LinkWithRoleModel
implements Serializable
{
private static final long serialVersionUID = 2027345278696308900L;
public static final String CLICK_HINT = "<Click to activate>";
public String role;
public String label = CLICK_HINT;
public int targetAddr = -1;
public boolean autoCreated;
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + ((label == null) ? 0 : label.hashCode());
result = prime * result + ((role == null) ? 0 : role.hashCode());
result = prime * result + targetAddr;
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
LinkWithRoleModel other = (LinkWithRoleModel) obj;
if (label == null) {
if (other.label != null) {
return false;
}
}
else if (!label.equals(other.label)) {
return false;
}
if (role == null) {
if (other.role != null) {
return false;
}
}
else if (!role.equals(other.role)) {
return false;
}
if (targetAddr != other.targetAddr) {
return false;
}
return true;
}
}
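/**
 * Disables forward annotation when the selected layer is not locked to token offsets, since
 * there is no forwarding for sub-/multi-token annotations.
 */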
private void updateForwardAnnotation(ActionContext aBModel) {
if (aBModel.getSelectedAnnotationLayer() != null
&& !aBModel.getSelectedAnnotationLayer().isLockToTokenOffset()) {
aBModel.setForwardAnnotation(false); // no forwarding for sub-/multitoken annotation
} else {
aBModel.setForwardAnnotation(aBModel.isForwardAnnotation());
}
}
public static class FeatureModel
implements Serializable
{
private static final long serialVersionUID = 3512979848975446735L;
public final AnnotationFeature feature;
public Serializable value;
public FeatureModel(AnnotationFeature aFeature, Serializable aValue)
{
feature = aFeature;
value = aValue;
// Avoid having null here because otherwise we have to handle null in a zillion places!
if (value == null && MultiValueMode.ARRAY.equals(aFeature.getMultiValueMode())) {
value = new ArrayList<>();
}
}
}
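/**
 * Builds the key-binding map used for forward annotation. Each tag is bound to a key sequence
 * derived from its first letter; tags sharing the same first letter get progressively longer
 * repetitions of that letter.
 */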
private Map<String, String> getBindTags() {
AnnotationFeature f = annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0);
TagSet tagSet = f.getTagset();
Map<Character, String> tagNames = new LinkedHashMap<>();
Map<String, String> bindTag2Key = new LinkedHashMap<>();
for (Tag tag : annotationService.listTags(tagSet)) {
if (tagNames.containsKey(tag.getName().toLowerCase().charAt(0))) {
String oldBinding = tagNames.get(tag.getName().toLowerCase().charAt(0));
String newBinding = oldBinding + tag.getName().toLowerCase().charAt(0);
tagNames.put(tag.getName().toLowerCase().charAt(0), newBinding);
bindTag2Key.put(newBinding, tag.getName());
} else {
tagNames.put(tag.getName().toLowerCase().charAt(0), tag.getName().toLowerCase().substring(0, 1));
bindTag2Key.put(tag.getName().toLowerCase().substring(0, 1), tag.getName());
}
}
return bindTag2Key;
}
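/**
 * Resolves the typed key sequence to a tag name, falling back to the first tag in the binding
 * map if the sequence is empty or unknown.
 */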
private String getKeyBindValue(String aKey, Map<String, String> aBindTags){
// check if all the keys pressed are the same character;
// if not, just look up a tag for the last character pressed
if(aKey.isEmpty()){
return aBindTags.get(aBindTags.keySet().iterator().next());
}
if (aBindTags.get(aKey)!=null){
return aBindTags.get(aKey);
}
// re-cycle suggestions
if(aBindTags.containsKey(aKey.substring(0,1))){
selectedTag = aKey.substring(0,1);
return aBindTags.get(aKey.substring(0,1));
}
// fall back to the first tag in the list when an arbitrary key is pressed
return aBindTags.get(aBindTags.keySet().iterator().next());
}
public void reset(AjaxRequestTarget aTarget)
{
bModel.getSelection().clear();
bModel.getSelection().setBegin(0);
bModel.getSelection().setEnd(0);
clearFeatures(aTarget);
}
public void refresh(AjaxRequestTarget aTarget)
throws BratAnnotationException
{
try {
if (!bModel.getSelection().isRelationAnno()) {
updateLayersDropdown(bModel);
}
JCas aJCas = getCas(bModel);
if (bModel.getSelection().isRelationAnno()) {
arcSelected(aTarget, aJCas);
}
else {
spanSelected(aTarget, aJCas);
}
updateRememberLayer();
aTarget.add(annotationFeatureForm);
}
catch (BratAnnotationException e) {
throw e;
}
catch (Exception e) {
throw new BratAnnotationException(e);
}
}
private void updateRememberLayer()
{
if (bModel.getPreferences().isRememberLayer()) {
if (bModel.getDefaultAnnotationLayer() == null) {
bModel.setDefaultAnnotationLayer(bModel.getSelectedAnnotationLayer());
}
}
else if (!bModel.getSelection().isRelationAnno()) {
bModel.setDefaultAnnotationLayer(bModel.getSelectedAnnotationLayer());
}
// if no layer is selected in Settings
if (bModel.getSelectedAnnotationLayer() != null) {
selectedAnnotationLayer.setDefaultModelObject(
bModel.getSelectedAnnotationLayer().getUiName());
}
}
/**
* Reset the link feature slot models when a new annotation is to be created.
*/
public void clearArmedSlotModel()
{
for (FeatureModel fm : featureModels) {
if (StringUtils.isNotBlank(fm.feature.getLinkTypeName())) {
fm.value = new ArrayList<>();
}
}
}
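/**
 * Collects all relation annotations which point to the given span, either directly or via the
 * attach feature of the relation layer, so that they can be deleted together with the span.
 */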
private Set<AnnotationFS> getAttachedRels(JCas aJCas, AnnotationFS aFs, AnnotationLayer aLayer) throws UIMAException, ClassNotFoundException, IOException{
Set<AnnotationFS> toBeDeleted = new HashSet<AnnotationFS>();
for (AnnotationLayer relationLayer : annotationService
.listAttachedRelationLayers(aLayer)) {
ArcAdapter relationAdapter = (ArcAdapter) getAdapter(annotationService,
relationLayer);
Type relationType = CasUtil.getType(aJCas.getCas(), relationLayer.getName());
Feature sourceFeature = relationType.getFeatureByBaseName(relationAdapter
.getSourceFeatureName());
Feature targetFeature = relationType.getFeatureByBaseName(relationAdapter
.getTargetFeatureName());
// This code is already prepared for the day that relations can go between
// different layers and may have different attach features for the source and
// target layers.
Feature relationSourceAttachFeature = null;
Feature relationTargetAttachFeature = null;
if (relationAdapter.getAttachFeatureName() != null) {
relationSourceAttachFeature = sourceFeature.getRange().getFeatureByBaseName(
relationAdapter.getAttachFeatureName());
relationTargetAttachFeature = targetFeature.getRange().getFeatureByBaseName(
relationAdapter.getAttachFeatureName());
}
for (AnnotationFS relationFS : CasUtil.select(aJCas.getCas(), relationType)) {
// Here we get the annotations that the relation is pointing to in the UI
FeatureStructure sourceFS;
if (relationSourceAttachFeature != null) {
sourceFS = relationFS.getFeatureValue(sourceFeature).getFeatureValue(
relationSourceAttachFeature);
}
else {
sourceFS = relationFS.getFeatureValue(sourceFeature);
}
FeatureStructure targetFS;
if (relationTargetAttachFeature != null) {
targetFS = relationFS.getFeatureValue(targetFeature).getFeatureValue(
relationTargetAttachFeature);
}
else {
targetFS = relationFS.getFeatureValue(targetFeature);
}
if (isSame(sourceFS, aFs) || isSame(targetFS, aFs)) {
toBeDeleted.add(relationFS);
LOG.debug("Deleted relation [" + getAddr(relationFS) + "] from layer ["
+ relationLayer.getName() + "]");
}
}
}
return toBeDeleted;
}
public AnnotationFeatureForm getAnnotationFeatureForm()
{
return annotationFeatureForm;
}
public Label getSelectedAnnotationLayer()
{
return selectedAnnotationLayer;
}
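/**
 * Checks whether any of the cached feature models refers to a feature which does not belong
 * to the given layer.
 */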
private boolean isFeatureModelChanged(AnnotationLayer aLayer){
for(FeatureModel fM: featureModels){
if(!annotationService.listAnnotationFeature(aLayer).contains(fM.feature)){
return true;
}
}
return false;
}
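/**
 * Forward annotation is only offered for span layers which are locked to token offsets and
 * have exactly one feature backed by a non-empty tagset.
 */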
private boolean isForwardable() {
if (bModel.getSelectedAnnotationLayer() == null) {
return false;
}
if (bModel.getSelectedAnnotationLayer().getId() <= 0) {
return false;
}
if (!bModel.getSelectedAnnotationLayer().getType().equals(WebAnnoConst.SPAN_TYPE)) {
return false;
}
if (!bModel.getSelectedAnnotationLayer().isLockToTokenOffset()) {
return false;
}
// no forward annotation for multifeature layers.
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).size()>1){
return false;
}
// if there are no features at all, no forward annotation
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).isEmpty()){
return false;
}
// we allow forward annotation only for a feature with a tagset
if(annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0).getTagset()==null){
return false;
}
TagSet tagSet = annotationService.listAnnotationFeature(bModel.getSelectedAnnotationLayer()).get(0).getTagset();
// there should be at least one tag in the tagset
if(annotationService.listTags(tagSet).size()==0){
return false;
}
return true;
}
private static String generateMessage(AnnotationLayer aLayer, String aLabel, boolean aDeleted)
{
String action = aDeleted ? "deleted" : "created/updated";
String msg = "The [" + aLayer.getUiName() + "] annotation has been " + action + ".";
if (StringUtils.isNotBlank(aLabel)) {
msg += " Label: [" + aLabel + "]";
}
return msg;
}
class StyledComboBox<T>
extends ComboBox<T>
{
private static final long serialVersionUID = 1L;
public StyledComboBox(String id, IModel<String> model, List<T> choices)
{
super(id, model, choices);
}
public StyledComboBox(String string, List<T> choices)
{
super(string, choices);
}
@Override
protected void onInitialize()
{
super.onInitialize();
add(new Behavior() {
private static final long serialVersionUID = -5674186692106167407L;
@Override
public void renderHead(Component aComponent, IHeaderResponse aResponse)
{
super.renderHead(aComponent, aResponse);
// Force-remove KendoDataSource header item if there already is one. This allows
// Wicket to re-declare the datasource for the callback URL of the new instance
// of this feature editor.
// This causes all the choices to be transferred again, but at least tags added
// to open tagsets appear immediately in the dropdown list and constraints
// apply (hopefully).
// Note: this must be done here instead of before the call to super such that
// first the old datasource declarations are removed and then the new one is
// added and remains in the HTML. Here we rely on the fact that the feature
// editors have a fixed markup ID (which we also rely on for restoring focus).
aResponse.render(new PriorityHeaderItem(JavaScriptHeaderItem.forScript(
"$('head script[id=kendo-datasource_" +
StyledComboBox.this.getMarkupId() + "]').remove();",
null)));
}
});
}
@Override
protected IJQueryTemplate newTemplate()
{
return new IJQueryTemplate()
{
private static final long serialVersionUID = 1L;
/**
* Marks the reordered entries in bold.
* Same as text feature editor.
*/
@Override
public String getText()
{
// Some docs on how the templates work in Kendo, in case we need
// more fancy dropdowns
// http://docs.telerik.com/kendo-ui/framework/templates/overview
StringBuilder sb = new StringBuilder();
sb.append("# if (data.reordered == 'true') { #");
sb.append("<div title=\"#: data.description #\"><b>#: data.name #</b></div>\n");
sb.append("# } else { #");
sb.append("<div title=\"#: data.description #\">#: data.name #</div>\n");
sb.append("# } #");
return sb.toString();
}
@Override
public List<String> getTextProperties()
{
return Arrays.asList("name", "description", "reordered");
}
};
}
}
} | #430 - Refactoring project structure
- Correcting comment text
| webanno-brat/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/annotation/component/AnnotationDetailEditorPanel.java | #430 - Refactoring project structure - Correcting comment text | <ide><path>ebanno-brat/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/annotation/component/AnnotationDetailEditorPanel.java
<ide>
<ide> protected void onChange(AjaxRequestTarget aTarget, ActionContext aBModel)
<ide> {
<del> // Overriden in BratAnnotator
<add> // Overriden in CurationPanel
<ide> }
<ide>
<ide> protected void onAutoForward(AjaxRequestTarget aTarget, ActionContext aBModel)
<ide> {
<del> // Overriden in BratAnnotator
<add> // Overriden in CurationPanel
<ide> }
<ide>
<ide> protected void onAnnotate(AjaxRequestTarget aTarget, ActionContext aModel) |
|
JavaScript | mit | 3cfe2c574ce5644f24caf9a82d45df60bca54f16 | 0 | jnwatts/rallycomputer,jnwatts/rallycomputer | function Rally() {
this.init();
this.ui = new RallyUI(this);
}
Rally.prototype = {
instructions: [ ],
instruction_map: new Map(),
init: function() {
var db = this.db = new Dexie('MyDatabase');
// Define a schema
db.version(1).stores({
instructions: [ 'id++', '&instr', 'raw_mlg', 'cas', 'delay', 'mlg', 'time'].join()
});
// Open the database
db.open()
.catch(function(error){ alert('Uh oh : ' + error); });
this.calculate();
this.cachedClockAdj = this.clockAdj();
},
cachedClockAdj: 0,
now: function() {
return Date.now() + this.cachedClockAdj;
},
calculate: function() {
var rally = this;
var prev = null;
return this.db.instructions.toArray(function (instructions) {
rally.instructions = instructions.sort(function (a,b) {
return a.instr - b.instr;
}).map(function (row) {
var instr = new RallyInstruction(row);
instr.calculate(rally, prev);
rally.instruction_map.set(parseFloat(instr.instr), instr);
instr.prev = prev;
if (prev) {
prev.next = instr;
}
prev = instr;
return instr;
});
rally.ui.renderInstructions();
});
},
instruction: function (instr) {
instr = parseFloat(instr);
if (this.instruction_map.has(instr)) {
return this.instruction_map.get(instr);
} else {
return null;
}
},
addInstruction: function() {
var row = new Object();
row.instr = Number.parseFloat(arguments[0]);
row.raw_mlg = null;
row.cas = null;
row.delay = null;
row.mlg = null;
row.time = null;
switch (arguments.length) {
case 6:
row.time = Number.parseInt(arguments[5]);
case 5:
row.mlg = Number.parseFloat(arguments[4]);
case 4:
row.delay = Number.parseFloat(arguments[3]);
case 3:
row.cas = Number.parseInt(arguments[2]);
case 2:
row.raw_mlg = Number.parseFloat(arguments[1]);
break;
}
var rally = this;
return this.db.instructions.put(row).then(function () {
return rally.calculate().then(function() {
return row.instr;
});
});
},
addNextInstruction: function() {
var keys = Object.keys(this.instructions);
var instr = 1;
if (keys.length > 0) {
var last = keys[keys.length - 1];
instr = Number.parseFloat(last) + 1;
}
return this.addInstruction(instr);
},
setValue: function (id, col_index, val) {
var rally = this;
var col = RallyInstruction.prototype.columnDefs[col_index];
var obj = {};
obj[col.name] = val;
this.db.instructions.update(id, obj).then(function () { rally.calculate(); }).catch(function (err) {
console.log(instr);
console.log(obj);
debugger
});
},
deleteInstruction: function(id) {
var rally = this;
this.db.instructions.where('id').equals(id).delete().then(function () {rally.calculate();});
},
odomFactor: function(val) {
if (arguments.length > 0) {
this.setConfig('odom_factor', val);
}
val = this.getConfig('odom_factor');
if (val == null) {
val = 1;
}
return Number.parseFloat(val);
},
casFactor: function(val) {
if (arguments.length > 0) {
this.setConfig('cas_factor', val);
}
val = this.getConfig('cas_factor');
if (val == null) {
val = 1;
}
return Number.parseFloat(val);
},
rallySpeed: function(val) {
if (arguments.length > 0) {
this.setConfig('rally_speed', val);
}
val = this.getConfig('rally_speed');
if (val == null) {
val = 1;
}
return Number.parseInt(val);
},
clockAdj: function(val) {
if (arguments.length > 0) {
this.setConfig('clock_adj', val);
this.cachedClockAdj = this.clockAdj();
}
val = this.getConfig('clock_adj');
if (val == null) {
val = 0;
}
return Number.parseInt(val);
},
timeSeconds: function(val) {
if (arguments.length > 0) {
this.setConfig('time_seconds', Boolean(val));
}
val = this.getConfig('time_seconds');
if (val == null) {
val = true;
} else {
val = (val.toLowerCase() == "true");
}
return Boolean(val);
},
adjustMilleage: function(val) {
return Number.parseFloat(val) * this.odomFactor();
},
adjustCAS: function(val) {
return Number.parseFloat(val) * this.casFactor();
},
getConfig: function(name) {
return window.localStorage.getItem(name);
},
setConfig: function(name, value) {
return window.localStorage.setItem(name, value);
},
reset: function() {
this.db.delete();
this.init();
},
};
function RallyInstruction(row) {
Object.assign(this, row, {
columns: [],
prev: null,
next: null,
});
var rally = this;
this.columns = this.columnDefs.map(function (d) { return d.cloneWith(rally); });
}
RallyInstruction.prototype = {
columnDefs: [],
col: function(index) {
var result = null;
var int_index = -1;
if (typeof index == 'string') {
int_index = this.columns.findIndex(function (v) { return (v.name == index); });
} else {
int_index = index;
}
if (int_index < 0 || int_index >= this.columns.length) {
throw new Error('Invalid index: ' + index);
}
return this.columns[int_index];
},
formatMilleage: function(val, places) {
return Math.round(val * 1000) / 1000;
},
formatDeltaTime: function(seconds) {
var minutes = ~~(seconds / 60);
return minutes + ':' + Math.round(seconds * 100) / 100;
},
calculate: function(rally, prev) {
var instr = this.col('instr');
var raw_mlg = this.col('raw_mlg');
var raw_d_mlg = this.col('raw_d_mlg');
var mlg = this.col('mlg');
var d_mlg = this.col('d_mlg');
var cas = this.col('cas');
var delay = this.col('delay');
var tod = this.col('tod');
var time = this.col('time');
var d_time = this.col('d_time');
if (prev) {
p = new Object();
p.instr = prev.col('instr');
p.raw_mlg = prev.col('raw_mlg');
p.raw_d_mlg = prev.col('raw_d_mlg');
p.mlg = prev.col('mlg');
p.d_mlg = prev.col('d_mlg');
p.cas = prev.col('cas');
p.delay = prev.col('delay');
p.tod = prev.col('tod');
p.time = prev.col('time');
p.d_time = prev.col('d_time');
prev = p;
}
this.columns.forEach(function (c) {
if (c.isSet()) {
c.calculated_value = c.value;
}
});
var CalcPrev = function(col, default_value, calc_cb) {
this.calc = function() {
if (col.isSet()) {
col.calculated_value = col.value;
} else if (prev && calc_cb) {
col.calculated_value = calc_cb()
} else {
col.calculated_value = default_value;
}
};
};
var CalcCur = function(col, calc_cb) {
this.calc = function() {
col.calculated_value = calc_cb();
};
};
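        // Note: these calc entries run in array order; raw_mlg is evaluated last so it
        // can be derived from the mlg column when an instruction only has mlg entered.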
calcFunctions = [
new CalcPrev(raw_d_mlg, 0, function () {
var result = raw_mlg.calculated_value - prev.raw_mlg.calculated_value;
return (result < 0 ? 0 : result);
}),
new CalcCur(d_mlg, function() { return rally.adjustMilleage(raw_d_mlg.calculated_value); }),
new CalcPrev(mlg, 0, function () {
if (raw_mlg.calculated_value == 0) {
return 0;
} else {
return prev.mlg.calculated_value + d_mlg.calculated_value;
}
}),
new CalcPrev(cas, NaN, function () { return prev.cas.calculated_value; }),
new CalcPrev(delay, 0, null),
new CalcPrev(d_time, 0, function () { return (raw_d_mlg.calculated_value * 3600) / prev.cas.calculated_value + delay.calculated_value; }),
new CalcPrev(time, 0, function () { return prev.time.calculated_value + d_time.calculated_value; }),
new CalcPrev(tod, 0, function () {
return prev.tod.calculated_value + (d_time.calculated_value * 1000);
}),
new CalcPrev(raw_mlg, 0, function () {
if (mlg.isSet() && raw_mlg.value == null) {
return prev.raw_mlg.calculated_value + (mlg.calculated_value - prev.mlg.calculated_value);
} else {
return 0;
}
}),
];
calcFunctions.forEach(function (f) {
f.calc();
});
},
};
RallyInstruction.prototype.Column = function(index, name, label, is_db, format_cb) {
this.index = index;
this.name = name;
this.label = label;
this.is_db = is_db;
this.format_cb = format_cb;
Object.defineProperty(this, 'value', {
get: function() {
if (this.instance) {
return this.format_cb(this.instance[this.name]);
}
},
set: function(v) {
if (this.instance) {
this.instance[this.name] = this.format_cb(v);
}
},
});
Object.defineProperty(this, 'display_value', {
get: function() {
if (this.instance) {
return (this.isSet() ? this.value : this.calculated_value);
}
},
});
};
RallyInstruction.prototype.Column.prototype = {
index: null,
name: null,
label: null,
is_db: null,
calculated_value: null,
instance: null,
cloneWith: function(instance) {
var clone = new RallyInstruction.prototype.Column();
Object.assign(clone, this);
clone.instance = instance;
return clone;
},
isCalculated: function() {
if (!this.instance) {
throw "Instance not set";
}
return this.value == null;
},
isSet: function() {
if (!this.instance) {
throw "Instance not set";
}
return this.is_db && this.value != null;
},
toString: function() {
return this.display_value;
},
};
RallyInstruction.prototype.parseFloat = function(v) {
var result = Number.parseFloat(v);
if (isNaN(result)) {
result = null;
}
return result;
}
RallyInstruction.prototype.parseInt = function(v) {
var result = Number.parseInt(v);
if (isNaN(result)) {
result = null;
}
return result;
}
RallyInstruction.prototype.columnDefs = [
new RallyInstruction.prototype.Column(0, 'instr', 'Instr', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(1, 'raw_mlg', 'Raw Mlg', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(2, 'raw_d_mlg', 'Raw ΔMlg', false, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(3, 'mlg', 'Mlg', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(4, 'd_mlg', 'ΔMlg', false, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(5, 'cas', 'CAS', true, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(6, 'delay', 'Delay', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(7, 'tod', 'TOD', true, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(8, 'time', 'Time', false, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(9, 'd_time', 'ΔTime', false, RallyInstruction.prototype.parseFloat),
];
| rally.js | function Rally() {
this.init();
this.ui = new RallyUI(this);
}
Rally.prototype = {
instructions: [ ],
instruction_map: new Map(),
init: function() {
var db = this.db = new Dexie('MyDatabase');
// Define a schema
db.version(1).stores({
instructions: [ 'id++', '&instr', 'raw_mlg', 'cas', 'delay', 'mlg', 'time'].join()
});
// Open the database
db.open()
.catch(function(error){ alert('Uh oh : ' + error); });
this.calculate();
this.cachedClockAdj = this.clockAdj();
},
cachedClockAdj: 0,
now: function() {
return Date.now() + this.cachedClockAdj;
},
calculate: function() {
var rally = this;
var prev = null;
return this.db.instructions.toArray(function (instructions) {
rally.instructions = instructions.sort(function (a,b) {
return a.instr - b.instr;
}).map(function (row) {
var instr = new RallyInstruction(row);
instr.calculate(rally, prev);
rally.instruction_map.set(parseFloat(instr.instr), instr);
instr.prev = prev;
if (prev) {
prev.next = instr;
}
prev = instr;
return instr;
});
rally.ui.renderInstructions();
});
},
instruction: function (instr) {
instr = parseFloat(instr);
if (this.instruction_map.has(instr)) {
return this.instruction_map.get(instr);
} else {
return null;
}
},
addInstruction: function() {
var row = new Object();
row.instr = Number.parseFloat(arguments[0]);
row.raw_mlg = null;
row.cas = null;
row.delay = null;
row.mlg = null;
row.time = null;
switch (arguments.length) {
case 6:
row.time = Number.parseInt(arguments[5]);
case 5:
row.mlg = Number.parseFloat(arguments[4]);
case 4:
row.delay = Number.parseFloat(arguments[3]);
case 3:
row.cas = Number.parseInt(arguments[2]);
case 2:
row.raw_mlg = Number.parseFloat(arguments[1]);
break;
}
var rally = this;
return this.db.instructions.put(row).then(function () {
return rally.calculate().then(function() {
return row.instr;
});
});
},
addNextInstruction: function() {
var keys = Object.keys(this.instructions);
var instr = 1;
if (keys.length > 0) {
var last = keys[keys.length - 1];
instr = Number.parseFloat(last) + 1;
}
return this.addInstruction(instr);
},
setValue: function (id, col_index, val) {
var rally = this;
var col = RallyInstruction.prototype.columnDefs[col_index];
var obj = {};
obj[col.name] = val;
this.db.instructions.update(id, obj).then(function () { rally.calculate(); }).catch(function (err) {
console.log(instr);
console.log(obj);
debugger
});
},
deleteInstruction: function(id) {
var rally = this;
this.db.instructions.where('id').equals(id).delete().then(function () {rally.calculate();});
},
odomFactor: function(val) {
if (arguments.length > 0) {
this.setConfig('odom_factor', val);
}
val = this.getConfig('odom_factor');
if (val == null) {
val = 1;
}
return Number.parseFloat(val);
},
casFactor: function(val) {
if (arguments.length > 0) {
this.setConfig('cas_factor', val);
}
val = this.getConfig('cas_factor');
if (val == null) {
val = 1;
}
return Number.parseFloat(val);
},
rallySpeed: function(val) {
if (arguments.length > 0) {
this.setConfig('rally_speed', val);
}
val = this.getConfig('rally_speed');
if (val == null) {
val = 1;
}
return Number.parseInt(val);
},
clockAdj: function(val) {
if (arguments.length > 0) {
this.setConfig('clock_adj', val);
this.cachedClockAdj = this.clockAdj();
}
val = this.getConfig('clock_adj');
if (val == null) {
val = 0;
}
return Number.parseInt(val);
},
timeSeconds: function(val) {
if (arguments.length > 0) {
this.setConfig('time_seconds', Boolean(val));
}
val = this.getConfig('time_seconds');
if (val == null) {
val = true;
} else {
val = (val.toLowerCase() == "true");
}
return Boolean(val);
},
adjustMilleage: function(val) {
return Number.parseFloat(val) * this.odomFactor();
},
adjustCAS: function(val) {
return Number.parseFloat(val) * this.casFactor();
},
getConfig: function(name) {
return window.localStorage.getItem(name);
},
setConfig: function(name, value) {
return window.localStorage.setItem(name, value);
},
reset: function() {
this.db.delete();
this.init();
},
};
function RallyInstruction(row) {
Object.assign(this, row, {
columns: [],
prev: null,
next: null,
});
var rally = this;
this.columns = this.columnDefs.map(function (d) { return d.cloneWith(rally); });
}
RallyInstruction.prototype = {
columnDefs: [],
col: function(index) {
var result = null;
var int_index = -1;
if (typeof index == 'string') {
int_index = this.columns.findIndex(function (v) { return (v.name == index); });
} else {
int_index = index;
}
if (int_index < 0 || int_index >= this.columns.length) {
throw new Error('Invalid index: ' + index);
}
return this.columns[int_index];
},
formatMilleage: function(val, places) {
return Math.round(val * 1000) / 1000;
},
formatDeltaTime: function(seconds) {
var minutes = ~~(seconds / 60);
return minutes + ':' + Math.round(seconds * 100) / 100;
},
calculate: function(rally, prev) {
var instr = this.col('instr');
var raw_mlg = this.col('raw_mlg');
var raw_d_mlg = this.col('raw_d_mlg');
var mlg = this.col('mlg');
var d_mlg = this.col('d_mlg');
var cas = this.col('cas');
var delay = this.col('delay');
var tod = this.col('tod');
var time = this.col('time');
var d_time = this.col('d_time');
if (prev) {
p = new Object();
p.instr = prev.col('instr');
p.raw_mlg = prev.col('raw_mlg');
p.raw_d_mlg = prev.col('raw_d_mlg');
p.mlg = prev.col('mlg');
p.d_mlg = prev.col('d_mlg');
p.cas = prev.col('cas');
p.delay = prev.col('delay');
p.tod = prev.col('tod');
p.time = prev.col('time');
p.d_time = prev.col('d_time');
prev = p;
}
this.columns.forEach(function (c) {
if (c.isSet()) {
c.calculated_value = c.value;
}
});
var CalcPrev = function(col, default_value, calc_cb) {
this.calc = function() {
if (col.isSet()) {
col.calculated_value = col.value;
} else if (prev && calc_cb) {
col.calculated_value = calc_cb()
} else {
col.calculated_value = default_value;
}
};
};
var CalcCur = function(col, calc_cb) {
this.calc = function() {
col.calculated_value = calc_cb();
};
};
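        // Note: these calc entries run in array order; raw_mlg is evaluated last so it
        // can be derived from the mlg column when an instruction only has mlg entered.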
calcFunctions = [
new CalcPrev(raw_d_mlg, 0, function () {
var result = raw_mlg.calculated_value - prev.raw_mlg.calculated_value;
return (result < 0 ? 0 : result);
}),
new CalcCur(d_mlg, function() { return rally.adjustMilleage(raw_d_mlg.calculated_value); }),
new CalcPrev(mlg, 0, function () {
if (raw_mlg.calculated_value == 0) {
return 0;
} else {
return prev.mlg.calculated_value + d_mlg.calculated_value;
}
}),
new CalcPrev(cas, NaN, function () { return prev.cas.calculated_value; }),
new CalcPrev(delay, 0, null),
new CalcPrev(d_time, 0, function () { return (raw_d_mlg.calculated_value * 3600) / prev.cas.calculated_value + delay.calculated_value; }),
new CalcPrev(time, 0, function () { return prev.time.calculated_value + d_time.calculated_value; }),
new CalcPrev(tod, 0, function () {
return prev.tod.calculated_value + (d_time.calculated_value * 1000);
}),
new CalcPrev(raw_mlg, 0, function () {
if (mlg.isSet() && raw_mlg.value == null) {
return prev.raw_mlg.calculated_value + (mlg.calculated_value - prev.mlg.calculated_value);
} else {
return 0;
}
}),
];
calcFunctions.forEach(function (f) {
f.calc();
});
},
};
RallyInstruction.prototype.Column = function(index, name, label, is_db, format_cb) {
this.index = index;
this.name = name;
this.label = label;
this.is_db = is_db;
this.format_cb = format_cb;
Object.defineProperty(this, 'value', {
get: function() {
if (this.instance) {
return this.format_cb(this.instance[this.name]);
}
},
set: function(v) {
if (this.instance) {
this.instance[this.name] = this.format_cb(v);
}
},
});
Object.defineProperty(this, 'display_value', {
get: function() {
if (this.instance) {
return (this.isSet() ? this.value : this.calculated_value);
}
},
});
};
RallyInstruction.prototype.Column.prototype = {
index: null,
name: null,
label: null,
is_db: null,
calculated_value: null,
instance: null,
cloneWith: function(instance) {
var clone = new RallyInstruction.prototype.Column();
Object.assign(clone, this);
clone.instance = instance;
return clone;
},
isCalculated: function() {
if (!this.instance) {
throw "Instance not set";
}
return this.value == null;
},
isSet: function() {
if (!this.instance) {
throw "Instance not set";
}
return this.is_db && this.value != null;
},
toString: function() {
return this.display_value;
},
};
RallyInstruction.prototype.parseFloat = function(v) {
var result = Number.parseFloat(v);
if (isNaN(result)) {
result = null;
}
return result;
}
RallyInstruction.prototype.parseInt = function(v) {
var result = Number.parseInt(v);
if (isNaN(result)) {
result = null;
}
return result;
}
RallyInstruction.prototype.parseTime = function(v) {
var result = moment(v, ["h-m-s", "h-m", "H-m A"]);
if (result.isValid()) {
return result.valueOf();
} else {
return null;
}
}
RallyInstruction.prototype.columnDefs = [
new RallyInstruction.prototype.Column(0, 'instr', 'Instr', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(1, 'raw_mlg', 'Raw Mlg', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(2, 'raw_d_mlg', 'Raw ΔMlg', false, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(3, 'mlg', 'Mlg', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(4, 'd_mlg', 'ΔMlg', false, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(5, 'cas', 'CAS', true, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(6, 'delay', 'Delay', true, RallyInstruction.prototype.parseFloat),
new RallyInstruction.prototype.Column(7, 'tod', 'TOD', true, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(8, 'time', 'Time', false, RallyInstruction.prototype.parseInt),
new RallyInstruction.prototype.Column(9, 'd_time', 'ΔTime', false, RallyInstruction.prototype.parseFloat),
];
| Rally: Remove unused parseTime
| rally.js | Rally: Remove unused parseTime | <ide><path>ally.js
<ide> return result;
<ide> }
<ide>
<del>RallyInstruction.prototype.parseTime = function(v) {
<del> var result = moment(v, ["h-m-s", "h-m", "H-m A"]);
<del> if (result.isValid()) {
<del> return result.valueOf();
<del> } else {
<del> return null;
<del> }
<del>}
<del>
<ide> RallyInstruction.prototype.columnDefs = [
<ide> new RallyInstruction.prototype.Column(0, 'instr', 'Instr', true, RallyInstruction.prototype.parseFloat),
<ide> new RallyInstruction.prototype.Column(1, 'raw_mlg', 'Raw Mlg', true, RallyInstruction.prototype.parseFloat), |
|
Java | bsd-3-clause | 902b828958e4c4d5aa33ed18430573ddabc7ddce | 0 | wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy | /*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.phases.common.inlining.info.elem;
import java.util.*;
import com.oracle.graal.api.meta.Constant;
import com.oracle.graal.api.meta.ResolvedJavaMethod;
import com.oracle.graal.compiler.common.type.Stamp;
import com.oracle.graal.debug.Debug;
import com.oracle.graal.graph.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.phases.common.CanonicalizerPhase;
import com.oracle.graal.phases.common.DeadCodeEliminationPhase;
import com.oracle.graal.phases.common.inlining.InliningUtil;
import com.oracle.graal.phases.tiers.HighTierContext;
import static com.oracle.graal.compiler.common.GraalOptions.OptCanonicalizer;
public class InlineableGraph implements Inlineable {
private final StructuredGraph graph;
public InlineableGraph(final ResolvedJavaMethod method, final Invoke invoke, final HighTierContext context, CanonicalizerPhase canonicalizer) {
StructuredGraph original = buildGraph(method, context, canonicalizer);
// TODO copying the graph is only necessary if it is modified or if it contains any invokes
this.graph = original.copy();
specializeGraphToArguments(invoke, context, canonicalizer);
}
/**
* @return a (possibly cached) graph. The caller is responsible for cloning before modification.
*/
private static StructuredGraph getOriginalGraph(final ResolvedJavaMethod method, final HighTierContext context) {
StructuredGraph intrinsicGraph = InliningUtil.getIntrinsicGraph(context.getReplacements(), method);
if (intrinsicGraph != null) {
return intrinsicGraph;
}
StructuredGraph cachedGraph = getCachedGraph(method, context);
if (cachedGraph != null) {
return cachedGraph;
}
return null;
}
/**
* This method looks up in a cache the graph for the argument, if not found bytecode is parsed.
* The graph thus obtained is returned, ie the caller is responsible for cloning before
* modification.
*/
private static StructuredGraph buildGraph(final ResolvedJavaMethod method, final HighTierContext context, CanonicalizerPhase canonicalizer) {
StructuredGraph result = getOriginalGraph(method, context);
if (result == null) {
result = parseBytecodes(method, context, canonicalizer);
}
return result;
}
/**
     * @return true iff one or more parameters of <code>newGraph</code> were specialized to account for
* a constant argument, or an argument with a more specific stamp.
*/
private boolean specializeGraphToArguments(final Invoke invoke, final HighTierContext context, CanonicalizerPhase canonicalizer) {
try (Debug.Scope s = Debug.scope("InlineGraph", graph)) {
ArrayList<Node> parameterUsages = replaceParamsWithMoreInformativeArguments(invoke, graph, context);
if (parameterUsages != null && OptCanonicalizer.getValue()) {
assert !parameterUsages.isEmpty() : "The caller didn't have more information about arguments after all";
canonicalizer.applyIncremental(graph, context, parameterUsages);
return true;
} else {
// TODO (chaeubl): if args are not more concrete, inlining should be avoided
// in most cases or we could at least use the previous graph size + invoke
// probability to check the inlining
return false;
}
} catch (Throwable e) {
throw Debug.handle(e);
}
}
private static boolean isArgMoreInformativeThanParam(ValueNode arg, ParameterNode param) {
if (arg.isConstant()) {
return true;
} else {
Stamp joinedStamp = param.stamp().join(arg.stamp());
if (joinedStamp != null && !joinedStamp.equals(param.stamp())) {
return true;
}
}
return false;
}
/**
* This method detects:
* <ul>
* <li>
* constants among the arguments to the <code>invoke</code></li>
* <li>
* arguments with more precise type than that declared by the corresponding parameter</li>
* </ul>
*
* <p>
* The corresponding parameters are updated to reflect the above information. Before doing so,
* their usages are added to <code>parameterUsages</code> for later incremental
* canonicalization.
* </p>
*
     * @return null if no incremental canonicalization is needed, a list of nodes for such
* canonicalization otherwise.
*/
private static ArrayList<Node> replaceParamsWithMoreInformativeArguments(final Invoke invoke, final StructuredGraph newGraph, final HighTierContext context) {
NodeInputList<ValueNode> args = invoke.callTarget().arguments();
ArrayList<Node> parameterUsages = null;
List<ParameterNode> params = newGraph.getNodes(ParameterNode.class).snapshot();
assert params.size() <= args.size();
/*
* param-nodes that aren't used (eg, as a result of canonicalization) don't occur in
* `params`. Thus, in general, the sizes of `params` and `args` don't always match. Still,
* it's always possible to pair a param-node with its corresponding arg-node using
* param.index() as index into `args`.
*/
for (ParameterNode param : params) {
if (param.usages().isNotEmpty()) {
ValueNode arg = args.get(param.index());
if (arg.isConstant()) {
Constant constant = arg.asConstant();
parameterUsages = trackParameterUsages(param, parameterUsages);
// collect param usages before replacing the param
newGraph.replaceFloating(param, ConstantNode.forConstant(constant, context.getMetaAccess(), newGraph));
// param-node gone, leaving a gap in the sequence given by param.index()
} else {
Stamp joinedStamp = param.stamp().join(arg.stamp());
if (joinedStamp != null && !joinedStamp.equals(param.stamp())) {
param.setStamp(joinedStamp);
parameterUsages = trackParameterUsages(param, parameterUsages);
} else {
assert !isArgMoreInformativeThanParam(arg, param);
}
}
}
}
assert (parameterUsages == null) || (!parameterUsages.isEmpty());
return parameterUsages;
}
private static ArrayList<Node> trackParameterUsages(ParameterNode param, ArrayList<Node> parameterUsages) {
ArrayList<Node> result = (parameterUsages == null) ? new ArrayList<>() : parameterUsages;
param.usages().snapshotTo(result);
return result;
}
private static StructuredGraph getCachedGraph(ResolvedJavaMethod method, HighTierContext context) {
if (context.getGraphCache() != null) {
StructuredGraph cachedGraph = context.getGraphCache().get(method);
if (cachedGraph != null) {
return cachedGraph;
}
}
return null;
}
/**
* This method builds the IR nodes for the given <code>method</code> and canonicalizes them.
* Provided profiling info is mature, the resulting graph is cached. The caller is responsible
* for cloning before modification.</p>
*/
private static StructuredGraph parseBytecodes(ResolvedJavaMethod method, HighTierContext context, CanonicalizerPhase canonicalizer) {
StructuredGraph newGraph = new StructuredGraph(method);
try (Debug.Scope s = Debug.scope("InlineGraph", newGraph)) {
if (context.getGraphBuilderSuite() != null) {
context.getGraphBuilderSuite().apply(newGraph, context);
}
assert newGraph.start().next() != null : "graph needs to be populated by the GraphBuilderSuite";
new DeadCodeEliminationPhase().apply(newGraph);
if (OptCanonicalizer.getValue()) {
canonicalizer.apply(newGraph, context);
}
if (context.getGraphCache() != null) {
context.getGraphCache().put(newGraph.method(), newGraph);
}
return newGraph;
} catch (Throwable e) {
throw Debug.handle(e);
}
}
@Override
public int getNodeCount() {
return graph.getNodeCount();
}
@Override
public Iterable<Invoke> getInvokes() {
return graph.getInvokes();
}
public StructuredGraph getGraph() {
return graph;
}
}
| graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/inlining/info/elem/InlineableGraph.java | /*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.phases.common.inlining.info.elem;
import java.util.*;
import com.oracle.graal.api.meta.Constant;
import com.oracle.graal.api.meta.ResolvedJavaMethod;
import com.oracle.graal.compiler.common.type.Stamp;
import com.oracle.graal.debug.Debug;
import com.oracle.graal.graph.*;
import com.oracle.graal.nodes.*;
import com.oracle.graal.phases.common.CanonicalizerPhase;
import com.oracle.graal.phases.common.DeadCodeEliminationPhase;
import com.oracle.graal.phases.common.inlining.InliningUtil;
import com.oracle.graal.phases.tiers.HighTierContext;
import static com.oracle.graal.compiler.common.GraalOptions.OptCanonicalizer;
public class InlineableGraph implements Inlineable {
private final StructuredGraph graph;
public InlineableGraph(final ResolvedJavaMethod method, final Invoke invoke, final HighTierContext context, CanonicalizerPhase canonicalizer) {
this.graph = buildGraph(method, context, canonicalizer);
specializeGraphToArguments(invoke, context, canonicalizer);
}
/**
* @return a (possibly cached) graph. The caller is responsible for cloning before modification.
*/
private static StructuredGraph getOriginalGraph(final ResolvedJavaMethod method, final HighTierContext context) {
StructuredGraph intrinsicGraph = InliningUtil.getIntrinsicGraph(context.getReplacements(), method);
if (intrinsicGraph != null) {
return intrinsicGraph;
}
StructuredGraph cachedGraph = getCachedGraph(method, context);
if (cachedGraph != null) {
return cachedGraph;
}
return null;
}
private static StructuredGraph buildGraph(final ResolvedJavaMethod method, final HighTierContext context, CanonicalizerPhase canonicalizer) {
StructuredGraph newGraph = getOriginalGraph(method, context);
if (newGraph == null) {
newGraph = parseBytecodes(method, context, canonicalizer);
}
// TODO (chaeubl): copying the graph is only necessary if it is modified or if it contains
// any invokes
return newGraph.copy();
}
/**
     * @return true iff one or more parameters of <code>newGraph</code> were specialized to account for
* a constant argument, or an argument with a more specific stamp.
*/
private boolean specializeGraphToArguments(final Invoke invoke, final HighTierContext context, CanonicalizerPhase canonicalizer) {
try (Debug.Scope s = Debug.scope("InlineGraph", graph)) {
ArrayList<Node> parameterUsages = replaceParamsWithMoreInformativeArguments(invoke, graph, context);
if (parameterUsages != null && OptCanonicalizer.getValue()) {
assert !parameterUsages.isEmpty() : "The caller didn't have more information about arguments after all";
canonicalizer.applyIncremental(graph, context, parameterUsages);
return true;
} else {
// TODO (chaeubl): if args are not more concrete, inlining should be avoided
// in most cases or we could at least use the previous graph size + invoke
// probability to check the inlining
return false;
}
} catch (Throwable e) {
throw Debug.handle(e);
}
}
private static boolean isArgMoreInformativeThanParam(ValueNode arg, ParameterNode param) {
if (arg.isConstant()) {
return true;
} else {
Stamp joinedStamp = param.stamp().join(arg.stamp());
if (joinedStamp != null && !joinedStamp.equals(param.stamp())) {
return true;
}
}
return false;
}
/**
* This method detects:
* <ul>
* <li>
* constants among the arguments to the <code>invoke</code></li>
* <li>
* arguments with more precise type than that declared by the corresponding parameter</li>
* </ul>
*
* <p>
* The corresponding parameters are updated to reflect the above information. Before doing so,
* their usages are added to <code>parameterUsages</code> for later incremental
* canonicalization.
* </p>
*
     * @return null if no incremental canonicalization is needed, a list of nodes for such
* canonicalization otherwise.
*/
private static ArrayList<Node> replaceParamsWithMoreInformativeArguments(final Invoke invoke, final StructuredGraph newGraph, final HighTierContext context) {
NodeInputList<ValueNode> args = invoke.callTarget().arguments();
ArrayList<Node> parameterUsages = null;
List<ParameterNode> params = newGraph.getNodes(ParameterNode.class).snapshot();
assert params.size() <= args.size();
/*
* param-nodes that aren't used (eg, as a result of canonicalization) don't occur in
* `params`. Thus, in general, the sizes of `params` and `args` don't always match. Still,
* it's always possible to pair a param-node with its corresponding arg-node using
* param.index() as index into `args`.
*/
for (ParameterNode param : params) {
if (param.usages().isNotEmpty()) {
ValueNode arg = args.get(param.index());
if (arg.isConstant()) {
Constant constant = arg.asConstant();
parameterUsages = trackParameterUsages(param, parameterUsages);
// collect param usages before replacing the param
newGraph.replaceFloating(param, ConstantNode.forConstant(constant, context.getMetaAccess(), newGraph));
// param-node gone, leaving a gap in the sequence given by param.index()
} else {
Stamp joinedStamp = param.stamp().join(arg.stamp());
if (joinedStamp != null && !joinedStamp.equals(param.stamp())) {
param.setStamp(joinedStamp);
parameterUsages = trackParameterUsages(param, parameterUsages);
} else {
assert !isArgMoreInformativeThanParam(arg, param);
}
}
}
}
assert (parameterUsages == null) || (!parameterUsages.isEmpty());
return parameterUsages;
}
private static ArrayList<Node> trackParameterUsages(ParameterNode param, ArrayList<Node> parameterUsages) {
ArrayList<Node> result = (parameterUsages == null) ? new ArrayList<>() : parameterUsages;
param.usages().snapshotTo(result);
return result;
}
private static StructuredGraph getCachedGraph(ResolvedJavaMethod method, HighTierContext context) {
if (context.getGraphCache() != null) {
StructuredGraph cachedGraph = context.getGraphCache().get(method);
if (cachedGraph != null) {
return cachedGraph;
}
}
return null;
}
/**
* This method builds the IR nodes for the given <code>method</code> and canonicalizes them.
* Provided profiling info is mature, the resulting graph is cached. The caller is responsible
* for cloning before modification.</p>
*/
private static StructuredGraph parseBytecodes(ResolvedJavaMethod method, HighTierContext context, CanonicalizerPhase canonicalizer) {
StructuredGraph newGraph = new StructuredGraph(method);
try (Debug.Scope s = Debug.scope("InlineGraph", newGraph)) {
if (context.getGraphBuilderSuite() != null) {
context.getGraphBuilderSuite().apply(newGraph, context);
}
assert newGraph.start().next() != null : "graph needs to be populated by the GraphBuilderSuite";
new DeadCodeEliminationPhase().apply(newGraph);
if (OptCanonicalizer.getValue()) {
canonicalizer.apply(newGraph, context);
}
if (context.getGraphCache() != null) {
context.getGraphCache().put(newGraph.method(), newGraph);
}
return newGraph;
} catch (Throwable e) {
throw Debug.handle(e);
}
}
@Override
public int getNodeCount() {
return graph.getNodeCount();
}
@Override
public Iterable<Invoke> getInvokes() {
return graph.getInvokes();
}
public StructuredGraph getGraph() {
return graph;
}
}
| [inliner] trickle up, thus making more visible, graph copying
| graal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/inlining/info/elem/InlineableGraph.java | [inliner] trickle up, thus making more visible, graph copying | <ide><path>raal/com.oracle.graal.phases.common/src/com/oracle/graal/phases/common/inlining/info/elem/InlineableGraph.java
<ide> private final StructuredGraph graph;
<ide>
<ide> public InlineableGraph(final ResolvedJavaMethod method, final Invoke invoke, final HighTierContext context, CanonicalizerPhase canonicalizer) {
<del> this.graph = buildGraph(method, context, canonicalizer);
<add> StructuredGraph original = buildGraph(method, context, canonicalizer);
<add> // TODO copying the graph is only necessary if it is modified or if it contains any invokes
<add> this.graph = original.copy();
<ide> specializeGraphToArguments(invoke, context, canonicalizer);
<ide> }
<ide>
<ide> return null;
<ide> }
<ide>
<add> /**
<add> * This method looks up in a cache the graph for the argument, if not found bytecode is parsed.
<add> * The graph thus obtained is returned, ie the caller is responsible for cloning before
<add> * modification.
<add> */
<ide> private static StructuredGraph buildGraph(final ResolvedJavaMethod method, final HighTierContext context, CanonicalizerPhase canonicalizer) {
<del> StructuredGraph newGraph = getOriginalGraph(method, context);
<del> if (newGraph == null) {
<del> newGraph = parseBytecodes(method, context, canonicalizer);
<del> }
<del> // TODO (chaeubl): copying the graph is only necessary if it is modified or if it contains
<del> // any invokes
<del> return newGraph.copy();
<add> StructuredGraph result = getOriginalGraph(method, context);
<add> if (result == null) {
<add> result = parseBytecodes(method, context, canonicalizer);
<add> }
<add> return result;
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | 6ebb06e0617fc677642c9c3df7588fb6023ec4ab | 0 | rPraml/org.openntf.domino,OpenNTF/org.openntf.domino,mariusj/org.openntf.domino,OpenNTF/org.openntf.domino,mariusj/org.openntf.domino,OpenNTF/org.openntf.domino,rPraml/org.openntf.domino,rPraml/org.openntf.domino,mariusj/org.openntf.domino,rPraml/org.openntf.domino | /*
* Copyright 2013
*
* @author Devin S. Olson ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package org.openntf.arpa;
/**
* @author dolson
*
*/
public enum ISO {
;
/**
* Carrier for ISO 3166-1 alpha 2 and alpha 3 code
*
	 * @author Devin S. Olson ([email protected])
*
* @see "ISO 3166-1-alpha-2 code" http://www.iso.org/iso/country_names_and_code_elements
*
*/
public static enum ISO3166 {
AF("AFG", "Afghanistan"), AX("ALA", "land Islands"), AL("ALB", "Albania"), DZ("DZA", "Algeria"), AS("ASM", "American Samoa"), AD(
"AND", "Andorra"), AO("AGO", "Angola"), AI("AIA", "Anguilla"), AQ("ATA", "Antarctica"), AG("ATG", "Antigua and Barbuda"), AR(
"ARG", "Argentina"), AM("ARM", "Armenia"), AW("ABW", "Aruba"), AU("AUS", "Australia"), AT("AUT", "Austria"), AZ("AZE",
"Azerbaijan"), BS("BHS", "Bahamas"), BH("BHR", "Bahrain"), BD("BGD", "Bangladesh"), BB("BRB", "Barbados"), BY("BLR",
"Belarus"), BE("BEL", "Belgium"), BZ("BLZ", "Belize"), BJ("BEN", "Benin"), BM("BMU", "Bermuda"), BT("BTN", "Bhutan"), BO(
"BOL", "Bolivia, Plurinational State of"), BQ("BES", "Bonaire, Sint Eustatius and Saba"), BA("BIH",
"Bosnia and Herzegovina"), BW("BWA", "Botswana"), BV("BVT", "Bouvet Island"), BR("BRA", "Brazil"), IO("IOT",
"British Indian Ocean Territory"), BN("BRN", "Brunei Darussalam"), BG("BGR", "Bulgaria"), BF("BFA", "Burkina Faso"), BI(
"BDI", "Burundi"), KH("KHM", "Cambodia"), CM("CMR", "Cameroon"), CA("CAN", "Canada"), CV("CPV", "Cape Verde"), KY("CYM",
"Cayman Islands"), CF("CAF", "Central African Republic"), TD("TCD", "Chad "), CL("CHL", "Chile"), CN("CHN", "China"), CX(
"CXR", "Christmas Island"), CC("CCK", "Cocos (Keeling) Islands"), CO("COL", "Colombia"), KM("COM", "Comoros"), CG("COG",
"Congo"), CD("COD", "Congo, the Democratic Republic of the"), CK("COK", "Cook Islands"), CR("CRI", "Costa Rica"), CI("CIV",
"Cte d'Ivoire"), HR("HRV", "Croatia"), CU("CUB", "Cuba"), CW("CUW", "Curaao"), CY("CYP", "Cyprus"), CZ("CZE",
"Czech Republic"), DK("DNK", "Denmark"), DJ("DJI", "Djibouti"), DM("DMA", "Dominica"), DO("DOM", "Dominican Republic"), EC(
"ECU", "Ecuador"), EG("EGY", "Egypt"), SV("SLV", "El Salvador"), GQ("GNQ", "Equatorial Guinea"), ER("ERI", "Eritrea"), EE(
"EST", "Estonia"), ET("ETH", "Ethiopia"), FK("FLK", "Falkland Islands (Malvinas)"), FO("FRO", "Faroe Islands"), FJ("FJI",
"Fiji"), FI("FIN", "Finland"), FR("FRA", "France"), GF("GUF", "French Guiana"), PF("PYF", "French Polynesia"), TF("ATF",
"French Southern Territories"), GA("GAB", "Gabon"), GM("GMB", "Gambia"), GE("GEO", "Georgia"), DE("DEU", "Germany"), GH(
"GHA", "Ghana"), GI("GIB", "Gibraltar"), GR("GRC", "Greece"), GL("GRL", "Greenland"), GD("GRD", "Grenada"), GP("GLP",
"Guadeloupe"), GU("GUM", "Guam"), GT("GTM", "Guatemala"), GG("GGY", "Guernsey"), GN("GIN", "Guinea"), GW("GNB",
"Guinea-Bissau"), GY("GUY", "Guyana"), HT("HTI", "Haiti"), HM("HMD", "Heard Island and McDonald Islands"), VA("VAT",
"Holy See (Vatican City State)"), HN("HND", "Honduras"), HK("HKG", "Hong Kong"), HU("HUN", "Hungary"), IS("ISL", "Iceland"), IN(
"IND", "India"), ID("IDN", "Indonesia"), IR("IRN", "Iran, Islamic Republic of"), IQ("IRQ", "Iraq"), IE("IRL", "Ireland"), IM(
"IMN", "Isle of Man"), IL("ISR", "Israel"), IT("ITA", "Italy"), JM("JAM", "Jamaica"), JP("JPN", "Japan"), JE("JEY",
"Jersey"), JO("JOR", "Jordan"), KZ("KAZ", "Kazakhstan"), KE("KEN", "Kenya"), KI("KIR", "Kiribati"), KP("PRK",
"Korea, Democratic People's Republic of"), KR("KOR", "Korea, Republic of"), KW("KWT", "Kuwait"), KG("KGZ", "Kyrgyzstan"), LA(
"LAO", "Lao People's Democratic Republic"), LV("LVA", "Latvia"), LB("LBN", "Lebanon"), LS("LSO", "Lesotho"), LR("LBR",
"Liberia"), LY("LBY", "Libya"), LI("LIE", "Liechtenstein"), LT("LTU", "Lithuania"), LU("LUX", "Luxembourg"), MO("MAC",
"Macao"), MK("MKD", "Macedonia, the former Yugoslav Republic of"), MG("MDG", "Madagascar"), MW("MWI", "Malawi"), MY("MYS",
"Malaysia"), MV("MDV", "Maldives"), ML("MLI", "Mali"), MT("MLT", "Malta"), MH("MHL", "Marshall Islands"), MQ("MTQ",
"Martinique"), MR("MRT", "Mauritania"), MU("MUS", "Mauritius"), YT("MYT", "Mayotte"), MX("MEX", "Mexico"), FM("FSM",
"Micronesia, Federated States of"), MD("MDA", "Moldova, Republic of"), MC("MCO", "Monaco"), MN("MNG", "Mongolia"), ME(
"MNE", "Montenegro"), MS("MSR", "Montserrat"), MA("MAR", "Morocco"), MZ("MOZ", "Mozambique"), MM("MMR", "Myanmar"), NA(
"NAM", "Namibia"), NR("NRU", "Nauru"), NP("NPL", "Nepal"), NL("NLD", "Netherlands"), NC("NCL", "New Caledonia"), NZ("NZL",
"New Zealand"), NI("NIC", "Nicaragua"), NE("NER", "Niger"), NG("NGA", "Nigeria"), NU("NIU", "Niue"), NF("NFK",
"Norfolk Island"), MP("MNP", "Northern Mariana Islands"), NO("NOR", "Norway"), OM("OMN", "Oman"), PK("PAK", "Pakistan"), PW(
"PLW", "Palau"), PS("PSE", "Palestine, State of"), PA("PAN", "Panama"), PG("PNG", "Papua New Guinea"), PY("PRY", "Paraguay"), PE(
"PER", "Peru"), PH("PHL", "Philippines"), PN("PCN", "Pitcairn"), PL("POL", "Poland"), PT("PRT", "Portugal"), PR("PRI",
"Puerto Rico"), QA("QAT", "Qatar"), RE("REU", "Runion"), RO("ROU", "Romania"), RU("RUS", "Russian Federation"), RW("RWA",
"Rwanda"), BL("BLM", "Saint Barthlemy"), SH("SHN", "Saint Helena, Ascension and Tristan da Cunha"), KN("KNA",
"Saint Kitts and Nevis"), LC("LCA", "Saint Lucia"), MF("MAF", "Saint Martin (French part)"), PM("SPM",
"Saint Pierre and Miquelon"), VC("VCT", "Saint Vincent and the Grenadines"), WS("WSM", "Samoa"), SM("SMR", "San Marino"), ST(
"STP", "Sao Tome and Principe"), SA("SAU", "Saudi Arabia"), SN("SEN", "Senegal"), RS("SRB", "Serbia"), SC("SYC",
"Seychelles"), SL("SLE", "Sierra Leone"), SG("SGP", "Singapore"), SX("SXM", "Sint Maarten (Dutch part)"), SK("SVK",
"Slovakia"), SI("SVN", "Slovenia"), SB("SLB", "Solomon Islands"), SO("SOM", "Somalia"), ZA("ZAF", "South Africa"), GS(
"SGS", "South Georgia and the South Sandwich Islands"), SS("SSD", "South Sudan"), ES("ESP", "Spain"), LK("LKA", "Sri Lanka"), SD(
"SDN", "Sudan"), SR("SUR", "Suriname"), SJ("SJM", "Svalbard and Jan Mayen"), SZ("SWZ", "Swaziland"), SE("SWE", "Sweden"), CH(
"CHE", "Switzerland"), SY("SYR", "Syrian Arab Republic"), TW("TWN", "Taiwan, Province of China"), TJ("TJK", "Tajikistan"), TZ(
"TZA", "Tanzania, United Republic of"), TH("THA", "Thailand"), TL("TLS", "Timor-Leste"), TG("TGO", "Togo"), TK("TKL",
"Tokelau"), TO("TON", "Tonga"), TT("TTO", "Trinidad and Tobago"), TN("TUN", "Tunisia"), TR("TUR", "Turkey"), TM("TKM",
"Turkmenistan"), TC("TCA", "Turks and Caicos Islands"), TV("TUV", "Tuvalu"), UG("UGA", "Uganda"), UA("UKR", "Ukraine"), AE(
"ARE", "United Arab Emirates"), GB("GBR", "United Kingdom"), US("USA", "United States"), UM("UMI",
"United States Minor Outlying Islands"), UY("URY", "Uruguay"), UZ("UZB", "Uzbekistan"), VU("VUT", "Vanuatu"), VE("VEN",
"Venezuela, Bolivarian Republic of"), VN("VNM", "Viet Nam"), VG("VGB", "Virgin Islands, British"), VI("VIR",
"Virgin Islands, U.S."), WF("WLF", "Wallis and Futuna"), EH("ESH", "Western Sahara"), YE("YEM", "Yemen"), ZM("ZMB",
"Zambia"), ZW("ZWE", "Zimbabwe");
private String _country;
private String _code3;
@Override
public String toString() {
return ISO3166.class.getName() + ": " + this.getCode2() + "(\"" + this.getCode3() + "\", \"" + this.getCountry() + "\")";
}
/**
* Gets the Country String
*
* @return the Country
*/
public String getCountry() {
return this._country;
}
/**
* Sets the Country String
*
* @param country
* the Country
*/
private void setCountry(final String country) {
this._country = country;
}
/**
* Gets the 2 digit Alpha Code
*
* @return 2 digit Alpha Code
*/
public String getCode2() {
return this.name();
}
/**
* Gets the 3 digit Alpha Code
*
* @return 3 digit Alpha Code
*/
public String getCode3() {
return this._code3;
}
/**
* Sets the 3 digit Alpha Code
*
* @param code3
* the 3 digit Alpha Code
*/
private void setCode3(final String code3) {
this._code3 = code3;
}
/**
* Instance Constructor
*
		 * @param code3
		 *            the 3 digit Alpha code
		 * @param country
		 *            Country Name
		 */
		private ISO3166(final String code3, final String country) {
this.setCountry(country);
this.setCode3(code3);
}
};
/*
* ******************************************************************
* ******************************************************************
*
* public utility methods
*
* ******************************************************************
* ******************************************************************
*/
/**
* Gets the ISO3166 enum for the specified code
*
* @param code
* 2 or 3 digit alpha code for the country
*
	 * @return ISO3166 enum for the specified code, if found. Null otherwise
*/
public static ISO3166 getISO3166(final String code) {
if (!isBlankString(code)) {
if ((2 == code.length()) && code.matches("^[A-Z]+[A-Z]$")) {
for (ISO3166 result : ISO3166.values()) {
if (code.equals(result.getCode2())) {
return result;
}
}
} else if ((3 == code.length()) && code.matches("^[A-Z]+[A-Z]+[A-Z]$")) {
for (ISO3166 result : ISO3166.values()) {
if (code.equals(result.getCode3())) {
return result;
}
}
}
}
return null;
}
/**
* Gets the Country String
*
* @param code
* 2 or 3 digit alpha code for the country
*
* @return the Country for the code, if found. Empty string "" otherwise.
*/
public static String getCountry(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? "" : temp.getCountry();
}
/**
* Determines if the code is a valid 2 digit country code
*
* @param code
* 2 digit alpha code for the country
*
* @return Flag indicating if the code is valid
*/
public static boolean isCountryCode2(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? false : temp.getCode2().equals(code);
}
/**
* Determines if the code is a valid 3 digit country code
*
* @param code
* 3 digit alpha code for the country
*
* @return Flag indicating if the code is valid
*/
public static boolean isCountryCode3(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? false : temp.getCode3().equals(code);
}
/**
* Converts a source string to Proper case (1st character uppercase, all others lowercase)
*
* @param string
* String to convert
*
* @return Converted string
*/
public static String toProperCase(final String string) {
return (ISO.isBlankString(string)) ? "" : string.substring(0, 1).toUpperCase() + string.substring(1).toLowerCase();
}
/**
* Determines if a string is null or blank
*
* @param string
* Source string to check for null or blank value.
*
* @return Flag indicating if the source string is null or blank.
*/
public static boolean isBlankString(final String string) {
return ((null == string) || (string.trim().length() < 1));
}
/**
* Checks a String to determine if it begins with a prefix.
*
* Performs a Case-INSENSITIVE check.
*
* <strong>Special Behavior</strong>: Returns false if source or prefix are null.
*
* @param source
* String to check if begins with prefix.
*
* @param prefix
	 *            String to match against the beginning of source.
*
* @return Flag indicating if source begins with prefix.
*/
public static boolean startsWithIgnoreCase(final String source, final String prefix) {
return ((null == source) || (null == prefix)) ? false : source.toLowerCase().startsWith(prefix.toLowerCase());
}
}
| org.openntf.domino/src/org/openntf/arpa/ISO.java | /*
* Copyright 2013
*
* @author Devin S. Olson ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package org.openntf.arpa;
/**
* @author dolson
*
*/
public enum ISO {
;
/**
* Carrier for ISO 3166-1 alpha 2 and alpha 3 code
*
	 * @author Devin S. Olson ([email protected])
*
* @see "ISO 3166-1-alpha-2 code" http://www.iso.org/iso/country_names_and_code_elements
*
*/
public static enum ISO3166 {
AF("AFG", "Afghanistan"), AX("ALA", "land Islands"), AL("ALB", "Albania"), DZ("DZA", "Algeria"), AS("ASM", "American Samoa"), AD(
"AND", "Andorra"), AO("AGO", "Angola"), AI("AIA", "Anguilla"), AQ("ATA", "Antarctica"), AG("ATG", "Antigua and Barbuda"), AR(
"ARG", "Argentina"), AM("ARM", "Armenia"), AW("ABW", "Aruba"), AU("AUS", "Australia"), AT("AUT", "Austria"), AZ("AZE",
"Azerbaijan"), BS("BHS", "Bahamas"), BH("BHR", "Bahrain"), BD("BGD", "Bangladesh"), BB("BRB", "Barbados"), BY("BLR",
"Belarus"), BE("BEL", "Belgium"), BZ("BLZ", "Belize"), BJ("BEN", "Benin"), BM("BMU", "Bermuda"), BT("BTN", "Bhutan"), BO(
"BOL", "Bolivia, Plurinational State of"), BQ("BES", "Bonaire, Sint Eustatius and Saba"), BA("BIH",
"Bosnia and Herzegovina"), BW("BWA", "Botswana"), BV("BVT", "Bouvet Island"), BR("BRA", "Brazil"), IO("IOT",
"British Indian Ocean Territory"), BN("BRN", "Brunei Darussalam"), BG("BGR", "Bulgaria"), BF("BFA", "Burkina Faso"), BI(
"BDI", "Burundi"), KH("KHM", "Cambodia"), CM("CMR", "Cameroon"), CA("CAN", "Canada"), CV("CPV", "Cape Verde"), KY("CYM",
"Cayman Islands"), CF("CAF", "Central African Republic"), TD("TCD", "Chad "), CL("CHL", "Chile"), CN("CHN", "China"), CX(
"CXR", "Christmas Island"), CC("CCK", "Cocos (Keeling) Islands"), CO("COL", "Colombia"), KM("COM", "Comoros"), CG("COG",
"Congo"), CD("COD", "Congo, the Democratic Republic of the"), CK("COK", "Cook Islands"), CR("CRI", "Costa Rica"), CI("CIV",
"Cte d'Ivoire"), HR("HRV", "Croatia"), CU("CUB", "Cuba"), CW("CUW", "Curaao"), CY("CYP", "Cyprus"), CZ("CZE",
"Czech Republic"), DK("DNK", "Denmark"), DJ("DJI", "Djibouti"), DM("DMA", "Dominica"), DO("DOM", "Dominican Republic"), EC(
"ECU", "Ecuador"), EG("EGY", "Egypt"), SV("SLV", "El Salvador"), GQ("GNQ", "Equatorial Guinea"), ER("ERI", "Eritrea"), EE(
"EST", "Estonia"), ET("ETH", "Ethiopia"), FK("FLK", "Falkland Islands (Malvinas)"), FO("FRO", "Faroe Islands"), FJ("FJI",
"Fiji"), FI("FIN", "Finland"), FR("FRA", "France"), GF("GUF", "French Guiana"), PF("PYF", "French Polynesia"), TF("ATF",
"French Southern Territories"), GA("GAB", "Gabon"), GM("GMB", "Gambia"), GE("GEO", "Georgia"), DE("DEU", "Germany"), GH(
"GHA", "Ghana"), GI("GIB", "Gibraltar"), GR("GRC", "Greece"), GL("GRL", "Greenland"), GD("GRD", "Grenada"), GP("GLP",
"Guadeloupe"), GU("GUM", "Guam"), GT("GTM", "Guatemala"), GG("GGY", "Guernsey"), GN("GIN", "Guinea"), GW("GNB",
"Guinea-Bissau"), GY("GUY", "Guyana"), HT("HTI", "Haiti"), HM("HMD", "Heard Island and McDonald Islands"), VA("VAT",
"Holy See (Vatican City State)"), HN("HND", "Honduras"), HK("HKG", "Hong Kong"), HU("HUN", "Hungary"), IS("ISL", "Iceland"), IN(
"IND", "India"), ID("IDN", "Indonesia"), IR("IRN", "Iran, Islamic Republic of"), IQ("IRQ", "Iraq"), IE("IRL", "Ireland"), IM(
"IMN", "Isle of Man"), IL("ISR", "Israel"), IT("ITA", "Italy"), JM("JAM", "Jamaica"), JP("JPN", "Japan"), JE("JEY",
"Jersey"), JO("JOR", "Jordan"), KZ("KAZ", "Kazakhstan"), KE("KEN", "Kenya"), KI("KIR", "Kiribati"), KP("PRK",
"Korea, Democratic People's Republic of"), KR("KOR", "Korea, Republic of"), KW("KWT", "Kuwait"), KG("KGZ", "Kyrgyzstan"), LA(
"LAO", "Lao People's Democratic Republic"), LV("LVA", "Latvia"), LB("LBN", "Lebanon"), LS("LSO", "Lesotho"), LR("LBR",
"Liberia"), LY("LBY", "Libya"), LI("LIE", "Liechtenstein"), LT("LTU", "Lithuania"), LU("LUX", "Luxembourg"), MO("MAC",
"Macao"), MK("MKD", "Macedonia, the former Yugoslav Republic of"), MG("MDG", "Madagascar"), MW("MWI", "Malawi"), MY("MYS",
"Malaysia"), MV("MDV", "Maldives"), ML("MLI", "Mali"), MT("MLT", "Malta"), MH("MHL", "Marshall Islands"), MQ("MTQ",
"Martinique"), MR("MRT", "Mauritania"), MU("MUS", "Mauritius"), YT("MYT", "Mayotte"), MX("MEX", "Mexico"), FM("FSM",
"Micronesia, Federated States of"), MD("MDA", "Moldova, Republic of"), MC("MCO", "Monaco"), MN("MNG", "Mongolia"), ME(
"MNE", "Montenegro"), MS("MSR", "Montserrat"), MA("MAR", "Morocco"), MZ("MOZ", "Mozambique"), MM("MMR", "Myanmar"), NA(
"NAM", "Namibia"), NR("NRU", "Nauru"), NP("NPL", "Nepal"), NL("NLD", "Netherlands"), NC("NCL", "New Caledonia"), NZ("NZL",
"New Zealand"), NI("NIC", "Nicaragua"), NE("NER", "Niger"), NG("NGA", "Nigeria"), NU("NIU", "Niue"), NF("NFK",
"Norfolk Island"), MP("MNP", "Northern Mariana Islands"), NO("NOR", "Norway"), OM("OMN", "Oman"), PK("PAK", "Pakistan"), PW(
"PLW", "Palau"), PS("PSE", "Palestine, State of"), PA("PAN", "Panama"), PG("PNG", "Papua New Guinea"), PY("PRY", "Paraguay"), PE(
"PER", "Peru"), PH("PHL", "Philippines"), PN("PCN", "Pitcairn"), PL("POL", "Poland"), PT("PRT", "Portugal"), PR("PRI",
"Puerto Rico"), QA("QAT", "Qatar"), RE("REU", "Runion"), RO("ROU", "Romania"), RU("RUS", "Russian Federation"), RW("RWA",
"Rwanda"), BL("BLM", "Saint Barthlemy"), SH("SHN", "Saint Helena, Ascension and Tristan da Cunha"), KN("KNA",
"Saint Kitts and Nevis"), LC("LCA", "Saint Lucia"), MF("MAF", "Saint Martin (French part)"), PM("SPM",
"Saint Pierre and Miquelon"), VC("VCT", "Saint Vincent and the Grenadines"), WS("WSM", "Samoa"), SM("SMR", "San Marino"), ST(
"STP", "Sao Tome and Principe"), SA("SAU", "Saudi Arabia"), SN("SEN", "Senegal"), RS("SRB", "Serbia"), SC("SYC",
"Seychelles"), SL("SLE", "Sierra Leone"), SG("SGP", "Singapore"), SX("SXM", "Sint Maarten (Dutch part)"), SK("SVK",
"Slovakia"), SI("SVN", "Slovenia"), SB("SLB", "Solomon Islands"), SO("SOM", "Somalia"), ZA("ZAF", "South Africa"), GS(
"SGS", "South Georgia and the South Sandwich Islands"), SS("SSD", "South Sudan"), ES("ESP", "Spain"), LK("LKA", "Sri Lanka"), SD(
"SDN", "Sudan"), SR("SUR", "Suriname"), SJ("SJM", "Svalbard and Jan Mayen"), SZ("SWZ", "Swaziland"), SE("SWE", "Sweden"), CH(
"CHE", "Switzerland"), SY("SYR", "Syrian Arab Republic"), TW("TWN", "Taiwan, Province of China"), TJ("TJK", "Tajikistan"), TZ(
"TZA", "Tanzania, United Republic of"), TH("THA", "Thailand"), TL("TLS", "Timor-Leste"), TG("TGO", "Togo"), TK("TKL",
"Tokelau"), TO("TON", "Tonga"), TT("TTO", "Trinidad and Tobago"), TN("TUN", "Tunisia"), TR("TUR", "Turkey"), TM("TKM",
"Turkmenistan"), TC("TCA", "Turks and Caicos Islands"), TV("TUV", "Tuvalu"), UG("UGA", "Uganda"), UA("UKR", "Ukraine"), AE(
"ARE", "United Arab Emirates"), GB("GBR", "United Kingdom"), US("USA", "United States"), UM("UMI",
"United States Minor Outlying Islands"), UY("URY", "Uruguay"), UZ("UZB", "Uzbekistan"), VU("VUT", "Vanuatu"), VE("VEN",
"Venezuela, Bolivarian Republic of"), VN("VNM", "Viet Nam"), VG("VGB", "Virgin Islands, British"), VI("VIR",
"Virgin Islands, U.S."), WF("WLF", "Wallis and Futuna"), EH("ESH", "Western Sahara"), YE("YEM", "Yemen"), ZM("ZMB",
"Zambia"), ZW("ZWE", "Zimbabwe");
private String _country;
private String _code3;
/**
* Gets the Country String
*
* @return the Country
*/
public String getCountry() {
return this._country;
}
/**
* Sets the Country String
*
* @param country
* the Country
*/
private void setCountry(final String country) {
this._country = country;
}
/**
* Gets the 2 digit Alpha Code
*
* @return 2 digit Alpha Code
*/
public String getCode2() {
return this.name();
}
/**
* Gets the 3 digit Alpha Code
*
* @return 3 digit Alpha Code
*/
public String getCode3() {
return this._code3;
}
/**
* Sets the 3 digit Alpha Code
*
* @param code3
* the 3 digit Alpha Code
*/
private void setCode3(final String code3) {
this._code3 = code3;
}
/**
* Instance Constructor
*
		 * @param code3
		 *            the 3 digit Alpha code
		 * @param country
		 *            Country Name
		 */
		private ISO3166(final String code3, final String country) {
this.setCountry(country);
this.setCode3(code3);
}
};
/*
* ******************************************************************
* ******************************************************************
*
* public utility methods
*
* ******************************************************************
* ******************************************************************
*/
/**
* Gets the ISO3166 enum for the specified code
*
* @param code
* 2 or 3 digit alpha code for the country
*
	 * @return ISO3166 enum for the specified code, if found. Null otherwise
*/
public static ISO3166 getISO3166(final String code) {
if (!isBlankString(code)) {
if ((2 == code.length()) && code.matches("^[A-Z]+[A-Z]$")) {
for (ISO3166 result : ISO3166.values()) {
if (code.equals(result.getCode2())) {
return result;
}
}
} else if ((3 == code.length()) && code.matches("^[A-Z]+[A-Z]+[A-Z]$")) {
for (ISO3166 result : ISO3166.values()) {
if (code.equals(result.getCode3())) {
return result;
}
}
}
}
return null;
}
/**
* Gets the Country String
*
* @param code
* 2 or 3 digit alpha code for the country
*
* @return the Country for the code, if found. Empty string "" otherwise.
*/
public static String getCountry(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? "" : temp.getCountry();
}
/**
* Determines if the code is a valid 2 digit country code
*
* @param code
* 2 digit alpha code for the country
*
* @return Flag indicating if the code is valid
*/
public static boolean isCountryCode2(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? false : temp.getCode2().equals(code);
}
/**
* Determines if the code is a valid 3 digit country code
*
* @param code
* 3 digit alpha code for the country
*
* @return Flag indicating if the code is valid
*/
public static boolean isCountryCode3(final String code) {
ISO3166 temp = ISO.getISO3166(code);
return (null == temp) ? false : temp.getCode3().equals(code);
}
/**
* Converts a source string to Proper case (1st character uppercase, all others lowercase)
*
* @param string
* String to convert
*
* @return Converted string
*/
public static String toProperCase(final String string) {
return (ISO.isBlankString(string)) ? "" : string.substring(0, 1).toUpperCase() + string.substring(1).toLowerCase();
}
/**
* Determines if a string is null or blank
*
* @param string
* Source string to check for null or blank value.
*
* @return Flag indicating if the source string is null or blank.
*/
public static boolean isBlankString(final String string) {
return ((null == string) || (string.trim().length() < 1));
}
}
| Added startsWithIgnoreCase() method, updated toString() method, completed javadoc comments.
| org.openntf.domino/src/org/openntf/arpa/ISO.java | Added startsWithIgnoreCase() method, updated toString() method, completed javadoc comments. | <ide><path>rg.openntf.domino/src/org/openntf/arpa/ISO.java
<ide> private String _country;
<ide> private String _code3;
<ide>
<add> @Override
<add> public String toString() {
<add> return ISO3166.class.getName() + ": " + this.getCode2() + "(\"" + this.getCode3() + "\", \"" + this.getCountry() + "\")";
<add> }
<add>
<ide> /**
<ide> * Gets the Country String
<ide> *
<ide> return ((null == string) || (string.trim().length() < 1));
<ide> }
<ide>
<add> /**
<add> * Checks a String to determine if it begins with a prefix.
<add> *
<add> * Performs a Case-INSENSITIVE check.
<add> *
<add> * <strong>Special Behavior</strong>: Returns false if source or prefix are null.
<add> *
<add> * @param source
<add> * String to check if begins with prefix.
<add> *
<add> * @param prefix
<add> * String to match agains the beginning of source.
<add> *
<add> * @return Flag indicating if source begins with prefix.
<add> */
<add> public static boolean startsWithIgnoreCase(final String source, final String prefix) {
<add> return ((null == source) || (null == prefix)) ? false : source.toLowerCase().startsWith(prefix.toLowerCase());
<add> }
<add>
<ide> } |
|
JavaScript | mit | f882cfbb84b874c05a290e577558e1ff824c342a | 0 | erasme/erajs | Ui.Linkable.extend('Ui.LinkButton',
/** @lends Ui.LinkButton# */
{
graphic: undefined,
/**
* @constructs
	 * @class A LinkButton is a button that is a hyperlink
* @extends Ui.Linkable
* @param {String} [config.text] Button's text
* @param {String} [config.icon] Icon name
* @param {String} [config.orientation] 'vertical' or 'horizontal'
* @param {mixed} [config] see {@link Ui.Linkable} constructor for more options.
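	 *
	 * @example
	 * // hypothetical usage (illustrative only, not taken from the original sources):
	 * var link = new Ui.LinkButton({ text: 'Home', icon: 'home', orientation: 'horizontal' });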
*/
constructor: function(config) {
this.graphic = new Ui.ButtonGraphic();
this.setContent(this.graphic);
this.connect(this, 'down', function() { this.graphic.setIsDown(true); });
this.connect(this, 'up', function() { this.graphic.setIsDown(false); });
this.connect(this, 'focus', function() { this.graphic.setColor(this.getStyleProperty('focusColor')); });
this.connect(this, 'blur', function() { this.graphic.setColor(this.getStyleProperty('color')); });
},
getText: function() {
return this.graphic.getText();
},
setText: function(text) {
this.graphic.setText(text);
},
getIcon: function() {
return this.graphic.getIcon();
},
setIcon: function(icon) {
this.graphic.setIcon(icon);
},
/** @return {String} Orientation */
getOrientation: function() {
return this.graphic.getOrientation();
},
/** @param {String} orientation can be 'vertical' or 'horizontal' */
setOrientation: function(orientation) {
this.graphic.setOrientation(orientation);
}
},
/** @lends Ui.LinkButton# */
{
onStyleChange: function() {
this.graphic.setRadius(this.getStyleProperty('radius'));
this.graphic.setSpacing(this.getStyleProperty('spacing'));
this.graphic.setIconSize(this.getStyleProperty('iconSize'));
if(this.getHasFocus())
this.graphic.setColor(this.getStyleProperty('focusColor'));
else
this.graphic.setColor(this.getStyleProperty('color'));
},
onDisable: function() {
Ui.Button.base.onDisable.call(this);
this.graphic.setIsEnable(false);
},
onEnable: function() {
Ui.Button.base.onEnable.call(this);
this.graphic.setIsEnable(true);
}
},
/** @lends Ui.LinkButton */
{
style: {
color: '#4fa8ff',
focusColor: '#f6caa2',
radius: 4,
spacing: 3,
iconSize: 24
}
});
| era/ui/linkbutton.js | Ui.Linkable.extend('Ui.LinkButton',
/** @lends Ui.LinkButton# */
{
graphic: undefined,
/**
* @constructs
	 * @class A LinkButton is a button that is a hyperlink
* @extends Ui.Linkable
* @param {String} [config.text] Button's text
* @param {String} [config.icon] Icon name
* @param {String} [config.orientation] 'vertical' or 'horizontal'
* @param {mixed} [config] see {@link Ui.Linkable} constructor for more options.
*/
constructor: function(config) {
this.graphic = new Ui.ButtonGraphic();
this.setContent(this.graphic);
this.connect(this, 'down', function() { this.graphic.setIsDown(true); });
this.connect(this, 'up', function() { this.graphic.setIsDown(false); });
this.connect(this, 'focus', function() { this.graphic.setColor(this.getStyleProperty('focusColor')); });
this.connect(this, 'blur', function() { this.graphic.setColor(this.getStyleProperty('color')); });
},
getText: function() {
return this.graphic.getText();
},
setText: function(text) {
this.graphic.setText(text);
},
getIcon: function() {
return this.graphic.getIcon();
},
setIcon: function(icon) {
this.graphic.setIcon(icon);
},
/** @return {String} Orientation */
getOrientation: function() {
return this.graphic.getOrientation();
},
/** @param {String} orientation can be 'vertical' or 'horizontal' */
setOrientation: function(orientation) {
this.graphic.setOrientation(orientation);
}
},
/** @lends Ui.LinkButton# */
{
onStyleChange: function() {
this.graphic.setRadius(this.getStyleProperty('radius'));
this.graphic.setSpacing(this.getStyleProperty('spacing'));
if(this.getHasFocus())
this.graphic.setColor(this.getStyleProperty('focusColor'));
else
this.graphic.setColor(this.getStyleProperty('color'));
},
onDisable: function() {
Ui.Button.base.onDisable.call(this);
this.graphic.setIsEnable(false);
},
onEnable: function() {
Ui.Button.base.onEnable.call(this);
this.graphic.setIsEnable(true);
}
},
/** @lends Ui.LinkButton */
{
style: {
color: '#4fa8ff',
focusColor: '#f6caa2',
radius: 4,
spacing: 3
}
});
| add iconSize style for Ui.LinkButton
| era/ui/linkbutton.js | add iconSize style for Ui.LinkButton | <ide><path>ra/ui/linkbutton.js
<ide> onStyleChange: function() {
<ide> this.graphic.setRadius(this.getStyleProperty('radius'));
<ide> this.graphic.setSpacing(this.getStyleProperty('spacing'));
<add> this.graphic.setIconSize(this.getStyleProperty('iconSize'));
<ide> if(this.getHasFocus())
<ide> this.graphic.setColor(this.getStyleProperty('focusColor'));
<ide> else
<ide> color: '#4fa8ff',
<ide> focusColor: '#f6caa2',
<ide> radius: 4,
<del> spacing: 3
<add> spacing: 3,
<add> iconSize: 24
<ide> }
<ide> });
<ide> |
|
Java | mit | 28cdd4ab58f002f8d6986e9d339dc498b7157aef | 0 | DougEdey/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server,dbayub/SB_Elsinore_Server,DougEdey/SB_Elsinore_Server | package com.sb.elsinore;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
/**
 * Periodically records the application status: the full JSON status is appended
 * to a raw log file, and each vessel's temperature and duty values are appended
 * to per-series CSV files under graph-data/&lt;start time&gt;/ for graphing.
 */
public class StatusRecorder implements Runnable {
private JSONObject lastStatus = null;
private long lastStatusTime = 0;
private String logFile = null;
private Thread thread;
private static final long SLEEP = 1000 * 5; // 5 seconds - is this too fast?
private long startTime = 0;
/**
* Start the thread.
*/
public final void start() {
if (thread == null || !thread.isAlive()) {
thread = new Thread(this);
thread.setDaemon(true);
thread.start();
}
}
/**
* Stop the thread.
*/
public final void stop() {
if (thread != null) {
thread.interrupt();
thread = null;
}
}
/**
* Main runnable, updates the files every five seconds.
*/
@Override
public final void run() {
//This will store multiple logs - one for raw data,
// one for each series (duty & temperature per vessel)
// For now - we'll store Duty, temperature vs time
//Assume new logs on each run
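        //Each series CSV line has the form "<epoch millis>,<value>",
        //for example "1417041600000,65.5" (illustrative values only)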
startTime = System.currentTimeMillis();
String directory = "graph-data/" + startTime + "/";
File directoryFile = new File(directory);
directoryFile.mkdirs();
//Generate a new log file under the current directory
logFile = directory + "raw.log";
File file = new File(this.logFile);
boolean fileExists = file.exists();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
try {
while (true) {
//Just going to record when something changes
String status = LaunchControl.getJSONStatus();
JSONObject newStatus = (JSONObject) JSONValue.parse(status);
if (lastStatus == null || isDifferent(lastStatus, newStatus)) {
//For now just log the whole status
//Eventually we may want multiple logs, etc.
writeToLog(newStatus, fileExists);
Date now = new Date();
if (lastStatus != null
&& now.getTime() - lastStatusTime > SLEEP) {
//Print out a point before now to make sure
// the plot lines are correct
printJsonToCsv(new Date(now.getTime() - SLEEP),
lastStatus, directory);
}
printJsonToCsv(now, newStatus, directory);
lastStatus = newStatus;
lastStatusTime = now.getTime();
fileExists = true;
}
Thread.sleep(SLEEP);
}
} catch (InterruptedException ex) {
BrewServer.LOG.warning("Status Recorder shutting down");
return;
//Don't do anything, this is how we close this out.
}
}
/**
* Save the status to the directory.
* @param now The current date to save the datapoint for.
* @param newStatus The JSON Status object to dump
* @param directory The graph data directory.
*/
protected final void printJsonToCsv(final Date now,
final JSONObject newStatus, final String directory) {
//Now look for differences in the temperature and duty
JSONArray vessels = (JSONArray) newStatus.get("vessels");
for (int x = 0; x < vessels.size(); x++) {
JSONObject vessel = (JSONObject) vessels.get(x);
if (vessel.containsKey("name")) {
String name = vessel.get("name").toString();
if (vessel.containsKey("tempprobe")) {
String temp = ((JSONObject) vessel.get("tempprobe"))
.get("temp").toString();
File tempFile = new File(directory + name + "-temp.csv");
appendToLog(tempFile, now.getTime() + "," + temp + "\r\n");
}
if (vessel.containsKey("pidstatus")) {
JSONObject pid = (JSONObject) vessel.get("pidstatus");
String duty = "0";
if (pid.containsKey("actualduty")) {
duty = pid.get("actualduty").toString();
} else if (!pid.get("mode").equals("off")) {
duty = pid.get("duty").toString();
}
File dutyFile = new File(directory + name + "-duty.csv");
appendToLog(dutyFile, now.getTime() + "," + duty + "\r\n");
}
}
}
}
/**
* Save the string to the log file.
* @param file The file object to save to
* @param toAppend The string to add to the file
*/
protected final void appendToLog(final File file, final String toAppend) {
FileWriter fileWriter = null;
try {
fileWriter = new FileWriter(file, true);
fileWriter.write(toAppend);
} catch (IOException ex) {
BrewServer.LOG.warning("Could not save to file: "
+ file.getAbsolutePath() + file.getName());
} finally {
try {
if (fileWriter != null) {
fileWriter.close();
}
} catch (IOException ex) {
BrewServer.LOG.warning("Could not close filewriter: "
+ file.getAbsolutePath() + file.getName());
}
}
}
/**
* Write a JSON object to the log file.
* @param status The JSON Object to log
* @param fileExists If the file exists, prepend a ","
* otherwise an open brace "["
*/
protected final void writeToLog(final JSONObject status,
final boolean fileExists) {
        // '[' opens the JSON array on the first write; later entries are separated by ','
        String append = (fileExists ? "," : "[") + status.toJSONString();
appendToLog(new File(this.logFile), append);
}
/**
* Check to see if the objects are different.
* @param previous The first object to check.
* @param current The second object to check
* @return True if the objects are different
*/
protected final boolean isDifferent(final JSONObject previous,
final JSONObject current) {
if (previous.size() != current.size()) {
return true;
}
for (Iterator<Object> it = previous.keySet().iterator(); it.hasNext();)
{
Object key = it.next();
if (!"elapsed".equals(key)) {
Object previousValue = previous.get(key);
Object currentValue = current.get(key);
if (compare(previousValue, currentValue)) {
return true;
}
}
}
return false;
}
/**
* Check to see if the JSONArrays are different.
* @param previous The first JSONArray to check
* @param current The second JSONArray to check.
* @return True if the JSONArrays are different
*/
protected final boolean isDifferent(final JSONArray previous,
final JSONArray current) {
if (previous.size() != current.size()) {
return true;
}
for (int x = 0; x < previous.size(); x++) {
Object previousValue = previous.get(x);
Object currentValue = current.get(x);
if (compare(previousValue, currentValue)) {
return true;
}
}
return false;
}
/**
* Compare two generic objects.
* @param previousValue First object to check
* @param currentValue Second object to check
* @return True if the objects are different, false if the same.
*/
protected final boolean compare(final Object previousValue,
final Object currentValue) {
if (previousValue instanceof JSONObject
&& currentValue instanceof JSONObject) {
if (isDifferent((JSONObject) previousValue,
(JSONObject) currentValue)) {
return true;
}
} else if (previousValue instanceof JSONArray
&& currentValue instanceof JSONArray) {
if (isDifferent((JSONArray) previousValue,
(JSONArray) currentValue)) {
return true;
}
} else {
if (!previousValue.equals(currentValue)) {
return true;
}
}
return false;
}
}
| src/com/sb/elsinore/StatusRecorder.java | package com.sb.elsinore;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
/**
*
*
*/
public class StatusRecorder implements Runnable
{
private JSONObject lastStatus = null;
private long lastStatusTime = 0;
private String logFile = null;
private Thread thread;
private static final long SLEEP = 1000 * 5; // 5 seconds - is this too fast?
private long startTime = 0;
public void start()
{
if (thread == null || !thread.isAlive())
{
thread = new Thread(this);
thread.setDaemon(true);
thread.start();
}
}
public void stop()
{
if (thread != null)
{
thread.interrupt();
thread = null;
}
}
@Override
public void run()
{
//This will store multiple logs - one for raw data, one for each series (duty & temperature per vessel)
//For now - we'll store Duty, temperature vs time
//Assume new logs on each run
startTime = System.currentTimeMillis();
String directory = "graph-data/"+startTime+"/";
File directoryFile = new File(directory);
directoryFile.mkdirs();
//Generate a new log file under the current directory
logFile = directory+"raw.log";
File file = new File(this.logFile);
boolean fileExists = file.exists();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
try
{
while (true)
{
//Just going to record when something changes
String status = LaunchControl.getJSONStatus();
JSONObject newStatus = (JSONObject) JSONValue.parse(status);
if (lastStatus == null || isDifferent(lastStatus, newStatus))
{
//For now just log the whole status and we'll figure it out later. Eventually we may want multiple logs, etc.
writeToLog(newStatus, fileExists);
Date now = new Date();
if( lastStatus != null && now.getTime() - lastStatusTime > SLEEP )
{
//Print out a point before now to make sure the plot lines are correct
printJsonToCsv(new Date(now.getTime()-SLEEP), lastStatus, directory);
}
printJsonToCsv(now, newStatus, directory);
lastStatus = newStatus;
lastStatusTime = now.getTime();
fileExists = true;
}
Thread.sleep(SLEEP);
}
}
catch (InterruptedException ex)
{
ex.printStackTrace();
//Don't do anything, this is how we close this out.
}
}
protected void printJsonToCsv(Date now, JSONObject newStatus, String directory)
{
//Now look for differences in the temperature and duty
JSONArray vessels = (JSONArray)newStatus.get("vessels");
for (int x=0;x<vessels.size();x++)
{
JSONObject vessel = (JSONObject) vessels.get(x);
if( vessel.containsKey("name") )
{
String name = vessel.get("name").toString();
if( vessel.containsKey("tempprobe"))
{
String temp = ((JSONObject)vessel.get("tempprobe")).get("temp").toString();
File tempFile = new File(directory+name+"-temp.csv");
//appendToLog(tempFile, '"'+sdf.format(now)+"\","+temp+"\r\n");
appendToLog(tempFile, now.getTime()+","+temp+"\r\n");
}
if( vessel.containsKey("pidstatus"))
{
JSONObject pid = (JSONObject)vessel.get("pidstatus");
String duty = "0";
if( pid.containsKey("actualduty"))
{
duty = pid.get("actualduty").toString();
}
else if ( !pid.get("mode").equals("off"))
{
duty = pid.get("duty").toString();
}
File dutyFile = new File(directory+name+"-duty.csv");
//appendToLog(dutyFile, '"'+sdf.format(now)+"\","+duty+"\r\n");
appendToLog(dutyFile, now.getTime()+","+duty+"\r\n");
}
}
}
}
protected void appendToLog(File file, String toAppend)
{
FileWriter fileWriter = null;
try
{
fileWriter = new FileWriter(file, true);
fileWriter.write(toAppend);
}
catch (IOException ex)
{
ex.printStackTrace();
//TODO Log something
}
finally
{
try
{
if (fileWriter != null)
{
fileWriter.close();
}
}
catch (IOException ex)
{
ex.printStackTrace();
}
}
}
protected void writeToLog(JSONObject status, boolean fileExists)
{
String append = fileExists?",":"[" + status.toJSONString();
appendToLog(new File(this.logFile), append);
}
protected boolean isDifferent(JSONObject previous, JSONObject current)
{
if( previous.size() != current.size())
{
return true;
}
for (Iterator it = previous.keySet().iterator(); it.hasNext();)
{
Object key = it.next();
if( !"elapsed".equals(key) )
{
Object previousValue = previous.get(key);
Object currentValue = current.get(key);
if( compare(previousValue, currentValue) )
{
return true;
}
}
}
return false;
}
protected boolean isDifferent(JSONArray previous, JSONArray current)
{
if( previous.size() != current.size())
{
return true;
}
for (int x=0;x<previous.size();x++)
{
Object previousValue = previous.get(x);
Object currentValue = current.get(x);
if( compare(previousValue, currentValue) )
{
return true;
}
}
return false;
}
protected boolean compare(Object previousValue, Object currentValue)
{
if( previousValue instanceof JSONObject && currentValue instanceof JSONObject)
{
if( isDifferent((JSONObject) previousValue, (JSONObject) currentValue))
{
return true;
}
}
else if( previousValue instanceof JSONArray && currentValue instanceof JSONArray)
{
if( isDifferent((JSONArray) previousValue, (JSONArray) currentValue))
{
return true;
}
}
else
{
if( !previousValue.equals(currentValue))
{
return true;
}
}
return false;
}
}
| Update StatusRecorder to be cleaner for code styles
| src/com/sb/elsinore/StatusRecorder.java | Update StatusRecorder to be cleaner for code styles | <ide><path>rc/com/sb/elsinore/StatusRecorder.java
<ide> import java.io.IOException;
<ide> import java.text.SimpleDateFormat;
<ide> import java.util.Date;
<del>import java.util.HashMap;
<ide> import java.util.Iterator;
<ide> import org.json.simple.JSONArray;
<ide> import org.json.simple.JSONObject;
<ide> *
<ide> *
<ide> */
<del>public class StatusRecorder implements Runnable
<del>{
<add>public class StatusRecorder implements Runnable {
<ide>
<ide> private JSONObject lastStatus = null;
<ide> private long lastStatusTime = 0;
<ide> private Thread thread;
<ide> private static final long SLEEP = 1000 * 5; // 5 seconds - is this too fast?
<ide> private long startTime = 0;
<del>
<del>
<del> public void start()
<del> {
<del> if (thread == null || !thread.isAlive())
<del> {
<add>
<add> /**
<add> * Start the thread.
<add> */
<add> public final void start() {
<add> if (thread == null || !thread.isAlive()) {
<ide> thread = new Thread(this);
<ide> thread.setDaemon(true);
<ide> thread.start();
<ide>
<ide> }
<ide>
<del> public void stop()
<del> {
<del> if (thread != null)
<del> {
<add> /**
<add> * Stop the thread.
<add> */
<add> public final void stop() {
<add> if (thread != null) {
<ide> thread.interrupt();
<ide> thread = null;
<ide> }
<ide> }
<ide>
<add> /**
<add> * Main runnable, updates the files every five seconds.
<add> */
<ide> @Override
<del> public void run()
<del> {
<del> //This will store multiple logs - one for raw data, one for each series (duty & temperature per vessel)
<del> //For now - we'll store Duty, temperature vs time
<del>
<add> public final void run() {
<add> //This will store multiple logs - one for raw data,
<add> // one for each series (duty & temperature per vessel)
<add> // For now - we'll store Duty, temperature vs time
<add>
<ide> //Assume new logs on each run
<ide> startTime = System.currentTimeMillis();
<del>
<del> String directory = "graph-data/"+startTime+"/";
<del>
<add>
<add> String directory = "graph-data/" + startTime + "/";
<ide> File directoryFile = new File(directory);
<ide> directoryFile.mkdirs();
<del>
<add>
<ide> //Generate a new log file under the current directory
<del> logFile = directory+"raw.log";
<del>
<add> logFile = directory + "raw.log";
<add>
<ide> File file = new File(this.logFile);
<ide> boolean fileExists = file.exists();
<del>
<add>
<ide> SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
<del>
<del>
<del>
<del> try
<del> {
<del> while (true)
<del> {
<add>
<add> try {
<add> while (true) {
<ide> //Just going to record when something changes
<ide> String status = LaunchControl.getJSONStatus();
<ide> JSONObject newStatus = (JSONObject) JSONValue.parse(status);
<ide>
<del> if (lastStatus == null || isDifferent(lastStatus, newStatus))
<del> {
<del>
<del>
<del> //For now just log the whole status and we'll figure it out later. Eventually we may want multiple logs, etc.
<add> if (lastStatus == null || isDifferent(lastStatus, newStatus)) {
<add> //For now just log the whole status
<add> //Eventually we may want multiple logs, etc.
<ide> writeToLog(newStatus, fileExists);
<del>
<add>
<ide> Date now = new Date();
<del>
<del>
<del> if( lastStatus != null && now.getTime() - lastStatusTime > SLEEP )
<del> {
<del> //Print out a point before now to make sure the plot lines are correct
<del> printJsonToCsv(new Date(now.getTime()-SLEEP), lastStatus, directory);
<del>
<add>
<add> if (lastStatus != null
<add> && now.getTime() - lastStatusTime > SLEEP) {
<add> //Print out a point before now to make sure
<add> // the plot lines are correct
<add> printJsonToCsv(new Date(now.getTime() - SLEEP),
<add> lastStatus, directory);
<ide> }
<del>
<add>
<ide> printJsonToCsv(now, newStatus, directory);
<ide> lastStatus = newStatus;
<ide> lastStatusTime = now.getTime();
<del>
<add>
<ide> fileExists = true;
<ide> }
<ide>
<ide> Thread.sleep(SLEEP);
<ide>
<ide> }
<del> }
<del> catch (InterruptedException ex)
<del> {
<del> ex.printStackTrace();
<add> } catch (InterruptedException ex) {
<add> BrewServer.LOG.warning("Status Recorder shutting down");
<add> return;
<ide> //Don't do anything, this is how we close this out.
<ide> }
<ide>
<ide> }
<del>
<del> protected void printJsonToCsv(Date now, JSONObject newStatus, String directory)
<del> {
<add>
<add> /**
<add> * Save the status to the directory.
<add> * @param now The current date to save the datapoint for.
<add> * @param newStatus The JSON Status object to dump
<add> * @param directory The graph data directory.
<add> */
<add> protected final void printJsonToCsv(final Date now,
<add> final JSONObject newStatus, final String directory) {
<ide> //Now look for differences in the temperature and duty
<del> JSONArray vessels = (JSONArray)newStatus.get("vessels");
<del> for (int x=0;x<vessels.size();x++)
<del> {
<del> JSONObject vessel = (JSONObject) vessels.get(x);
<del> if( vessel.containsKey("name") )
<del> {
<del> String name = vessel.get("name").toString();
<del>
<del> if( vessel.containsKey("tempprobe"))
<del> {
<del> String temp = ((JSONObject)vessel.get("tempprobe")).get("temp").toString();
<del> File tempFile = new File(directory+name+"-temp.csv");
<del> //appendToLog(tempFile, '"'+sdf.format(now)+"\","+temp+"\r\n");
<del> appendToLog(tempFile, now.getTime()+","+temp+"\r\n");
<del> }
<del>
<del> if( vessel.containsKey("pidstatus"))
<del> {
<del> JSONObject pid = (JSONObject)vessel.get("pidstatus");
<del> String duty = "0";
<del> if( pid.containsKey("actualduty"))
<del> {
<del> duty = pid.get("actualduty").toString();
<del> }
<del> else if ( !pid.get("mode").equals("off"))
<del> {
<del> duty = pid.get("duty").toString();
<del> }
<del> File dutyFile = new File(directory+name+"-duty.csv");
<del> //appendToLog(dutyFile, '"'+sdf.format(now)+"\","+duty+"\r\n");
<del> appendToLog(dutyFile, now.getTime()+","+duty+"\r\n");
<del> }
<del>
<del> }
<add> JSONArray vessels = (JSONArray) newStatus.get("vessels");
<add> for (int x = 0; x < vessels.size(); x++) {
<add> JSONObject vessel = (JSONObject) vessels.get(x);
<add> if (vessel.containsKey("name")) {
<add> String name = vessel.get("name").toString();
<add>
<add> if (vessel.containsKey("tempprobe")) {
<add> String temp = ((JSONObject) vessel.get("tempprobe"))
<add> .get("temp").toString();
<add> File tempFile = new File(directory + name + "-temp.csv");
<add> appendToLog(tempFile, now.getTime() + "," + temp + "\r\n");
<add> }
<add>
<add> if (vessel.containsKey("pidstatus")) {
<add> JSONObject pid = (JSONObject) vessel.get("pidstatus");
<add> String duty = "0";
<add> if (pid.containsKey("actualduty")) {
<add> duty = pid.get("actualduty").toString();
<add> } else if (!pid.get("mode").equals("off")) {
<add> duty = pid.get("duty").toString();
<ide> }
<del> }
<del>
<del> protected void appendToLog(File file, String toAppend)
<del> {
<add> File dutyFile = new File(directory + name + "-duty.csv");
<add> appendToLog(dutyFile, now.getTime() + "," + duty + "\r\n");
<add> }
<add>
<add> }
<add> }
<add> }
<add>
<add> /**
<add> * Save the string to the log file.
<add> * @param file The file object to save to
<add> * @param toAppend The string to add to the file
<add> */
<add> protected final void appendToLog(final File file, final String toAppend) {
<ide> FileWriter fileWriter = null;
<del> try
<del> {
<add> try {
<ide> fileWriter = new FileWriter(file, true);
<ide> fileWriter.write(toAppend);
<del> }
<del> catch (IOException ex)
<del> {
<del> ex.printStackTrace();
<del> //TODO Log something
<del> }
<del> finally
<del> {
<del> try
<del> {
<del> if (fileWriter != null)
<del> {
<add> } catch (IOException ex) {
<add> BrewServer.LOG.warning("Could not save to file: "
<add> + file.getAbsolutePath() + file.getName());
<add> } finally {
<add> try {
<add> if (fileWriter != null) {
<ide> fileWriter.close();
<ide> }
<del> }
<del> catch (IOException ex)
<del> {
<del> ex.printStackTrace();
<del> }
<del> }
<del> }
<del>
<del> protected void writeToLog(JSONObject status, boolean fileExists)
<del> {
<del> String append = fileExists?",":"[" + status.toJSONString();
<add> } catch (IOException ex) {
<add> BrewServer.LOG.warning("Could not close filewriter: "
<add> + file.getAbsolutePath() + file.getName());
<add> }
<add> }
<add> }
<add>
<add> /**
<add> * Write a JSON object to the log file.
<add> * @param status The JSON Object to log
<add> * @param fileExists If the file exists, prepend a ","
<add> * otherwise an open brace "["
<add> */
<add> protected final void writeToLog(final JSONObject status,
<add> final boolean fileExists) {
<add> String append = fileExists ? "," : "[" + status.toJSONString();
<ide> appendToLog(new File(this.logFile), append);
<ide> }
<ide>
<del> protected boolean isDifferent(JSONObject previous, JSONObject current)
<del> {
<del>
<del> if( previous.size() != current.size())
<del> {
<add> /**
<add> * Check to see if the objects are different.
<add> * @param previous The first object to check.
<add> * @param current The second object to check
<add> * @return True if the objects are different
<add> */
<add> protected final boolean isDifferent(final JSONObject previous,
<add> final JSONObject current) {
<add> if (previous.size() != current.size()) {
<ide> return true;
<ide> }
<del>
<del>
<del> for (Iterator it = previous.keySet().iterator(); it.hasNext();)
<add>
<add> for (Iterator<Object> it = previous.keySet().iterator(); it.hasNext();)
<ide> {
<ide> Object key = it.next();
<del> if( !"elapsed".equals(key) )
<del> {
<add> if (!"elapsed".equals(key)) {
<ide> Object previousValue = previous.get(key);
<ide> Object currentValue = current.get(key);
<ide>
<del> if( compare(previousValue, currentValue) )
<del> {
<add> if (compare(previousValue, currentValue)) {
<ide> return true;
<ide> }
<ide> }
<del>
<del> }
<del>
<add> }
<ide> return false;
<ide> }
<del>
<del> protected boolean isDifferent(JSONArray previous, JSONArray current)
<del> {
<del>
<del>
<del> if( previous.size() != current.size())
<del> {
<add>
<add> /**
<add> * Check to see if the JSONArrays are different.
<add> * @param previous The first JSONArray to check
<add> * @param current The second JSONArray to check.
<add> * @return True if the JSONArrays are different
<add> */
<add> protected final boolean isDifferent(final JSONArray previous,
<add> final JSONArray current) {
<add>
<add> if (previous.size() != current.size()) {
<ide> return true;
<ide> }
<del>
<del>
<del> for (int x=0;x<previous.size();x++)
<del> {
<add>
<add> for (int x = 0; x < previous.size(); x++) {
<ide> Object previousValue = previous.get(x);
<ide> Object currentValue = current.get(x);
<del>
<del> if( compare(previousValue, currentValue) )
<del> {
<del> return true;
<del> }
<del>
<del> }
<del>
<add>
<add> if (compare(previousValue, currentValue)) {
<add> return true;
<add> }
<add> }
<ide> return false;
<ide> }
<del>
<del> protected boolean compare(Object previousValue, Object currentValue)
<del> {
<del> if( previousValue instanceof JSONObject && currentValue instanceof JSONObject)
<del> {
<del> if( isDifferent((JSONObject) previousValue, (JSONObject) currentValue))
<del> {
<del> return true;
<del> }
<del> }
<del> else if( previousValue instanceof JSONArray && currentValue instanceof JSONArray)
<del> {
<del> if( isDifferent((JSONArray) previousValue, (JSONArray) currentValue))
<del> {
<del> return true;
<del> }
<del> }
<del> else
<del> {
<del> if( !previousValue.equals(currentValue))
<del> {
<del> return true;
<del> }
<del> }
<del>
<add>
<add> /**
<add> * Compare two generic objects.
<add> * @param previousValue First object to check
<add> * @param currentValue Second object to check
<add> * @return True if the objects are different, false if the same.
<add> */
<add> protected final boolean compare(final Object previousValue,
<add> final Object currentValue) {
<add> if (previousValue instanceof JSONObject
<add> && currentValue instanceof JSONObject) {
<add> if (isDifferent((JSONObject) previousValue,
<add> (JSONObject) currentValue)) {
<add> return true;
<add> }
<add> } else if (previousValue instanceof JSONArray
<add> && currentValue instanceof JSONArray) {
<add> if (isDifferent((JSONArray) previousValue,
<add> (JSONArray) currentValue)) {
<add> return true;
<add> }
<add> } else {
<add> if (!previousValue.equals(currentValue)) {
<add> return true;
<add> }
<add> }
<add>
<ide> return false;
<ide> }
<del>
<del>
<del>
<ide> } |
|
JavaScript | mit | 960dd3a9d8d7e8c93332ccda190230ce9fffcb3e | 0 | twwongaq/teamform-seed,twwongaq/teamform-seed,twwongaq/teamform-seed | describe('teamform-admin-app', function() {
//
// Example: A test case of getRandomIntInclusive
//
describe('change size test', function() {
it('test change size', function() {
$scope.param.minTeamSize = 2;
$scope.param.maxTeamSize = 8;
$scope.changeMinTeamSize(3);
$scope.changeMaxTeamSize(-2);
expect($scope.param.minTeamSize).toEqual(5);
expect($scope.param.maxTeamSize).toEqual(6);
});
});
}); | app/unit_tests/test_admin.js | describe('teamform-admin-app', function() {
//
// Example: A test case of getRandomIntInclusive
//
var $scope;
describe('change min size test', function() {
it('test change size', function() {
var value = 2;
$scope.changeMinTeamSize(3);
expect( $scope.param.minTeamSize ).toEqual(5);
});
});
}); | Change size test for admin page
| app/unit_tests/test_admin.js | Change size test for admin page | <ide><path>pp/unit_tests/test_admin.js
<ide> //
<ide> // Example: A test case of getRandomIntInclusive
<ide> //
<del> var $scope;
<del> describe('change min size test', function() {
<add> describe('change size test', function() {
<ide>
<ide> it('test change size', function() {
<del> var value = 2;
<del> $scope.changeMinTeamSize(3);
<del> expect( $scope.param.minTeamSize ).toEqual(5);
<add> $scope.param.minTeamSize = 2;
<add> $scope.param.maxTeamSize = 8;
<add> $scope.changeMinTeamSize(3);
<add> $scope.changeMaxTeamSize(-2);
<add> expect($scope.param.minTeamSize).toEqual(5);
<add> expect($scope.param.maxTeamSize).toEqual(6);
<ide> });
<ide>
<ide> }); |
|
Java | bsd-3-clause | 2ddac22c7127f2e3c768d33c10f177621152dd9c | 0 | FieldFlux/orbit,DanielSperry/orbit,JoeHegarty/orbit,quilombodigital/orbit | /*
Copyright (C) 2015 Electronic Arts Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Electronic Arts, Inc. ("EA") nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY ELECTRONIC ARTS AND ITS CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS OR ITS CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.ea.orbit.actors.extensions.ldap;
import com.ea.orbit.actors.extensions.AbstractStorageExtension;
import com.ea.orbit.actors.runtime.ActorReference;
import com.ea.orbit.concurrent.Task;
import com.ea.orbit.exception.UncheckedException;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.DefaultModification;
import org.apache.directory.api.ldap.model.entry.Modification;
import org.apache.directory.api.ldap.model.entry.ModificationOperation;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class LdapStorageExtension extends AbstractStorageExtension
{
private String dn;
private String credentials;
private String host = "localhost";
private int port = 10389;
@Override
public Task<Void> start()
{
return Task.done();
}
@Override
public Task<Void> stop()
{
return Task.done();
}
@Override
public Task<Void> clearState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
connection = acquireConnection();
connection.delete(absoluteDn(reference, entity(state)));
}
catch (Exception ignored)
{
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.done();
}
@Override
@SuppressWarnings("unchecked")
public Task<Boolean> readState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
LdapEntity entity = entity(state);
connection = acquireConnection();
EntryCursor cursor = connection.search(absoluteDn(reference, entity), "(objectclass=*)", SearchScope.OBJECT, "*");
if (cursor.next())
{
Map<String, Field> map = getFieldAttributeMap(state.getClass());
for (String key : map.keySet())
{
map.get(key).set(state, cursor.get().get(key).get().getString());
}
}
else
{
return Task.fromValue(false);
}
}
catch (Exception e)
{
throw new UncheckedException(e);
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.fromValue(true);
}
private Map<String, Field> getFieldAttributeMap(Class clazz)
{
//TODO this is cacheable
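        // Maps LDAP attribute name -> state field: public fields are keyed by their own
        // name; fields annotated with @LdapAttribute may declare a custom attribute name.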
Map<String, Field> map = new HashMap<>();
List<Field> fields = new ArrayList<>();
for (Class c = clazz; c != null && c != Object.class; c = c.getSuperclass())
{
final Field[] declaredFields = c.getDeclaredFields();
if (declaredFields != null && declaredFields.length > 0)
{
for (Field f : declaredFields)
{
fields.add(f);
}
}
}
fields.stream().filter(f -> Modifier.isPublic(f.getModifiers()) || f.isAnnotationPresent(LdapAttribute.class)).forEach(f -> {
LdapAttribute attrann = f.getAnnotation(LdapAttribute.class);
if (attrann == null)
{
map.put(f.getName(), f);
}
else
{
map.put(attrann.value() == null ? f.getName() : attrann.value(), f);
}
});
return map;
}
@Override
@SuppressWarnings("unchecked")
public Task<Void> writeState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
LdapEntity entity = entity(state);
List<String> attributes = new ArrayList(Arrays.asList(entity.attributes()));
Map<String, Field> map = getFieldAttributeMap(state.getClass());
for (String key : map.keySet())
{
attributes.add(key + ": " + map.get(key).get(state).toString());
}
connection = acquireConnection();
EntryCursor cursor = connection.search(absoluteDn(reference, entity), "(objectclass=*)", SearchScope.OBJECT, "*");
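            // If an entry already exists for this actor, replace its attribute values;
            // otherwise add a brand new entry under the configured base DN.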
if (cursor.next())
{
List<Modification> modifications = new ArrayList<>();
for (String tmp : map.keySet())
{
String value = (String) map.get(tmp).get(state);
Modification mod = new DefaultModification(ModificationOperation.REPLACE_ATTRIBUTE, tmp, value);
modifications.add(mod);
}
connection.modify(cursor.get().getDn(), modifications.toArray(new Modification[0]));
}
else
{
connection.add(new DefaultEntry(absoluteDn(reference, entity), attributes.toArray()));
}
}
catch (Exception e)
{
throw new UncheckedException(e);
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.done();
}
private LdapConnection acquireConnection() throws LdapException, IOException
{
//TODO acquire from pool (exists at the ldap client api, change later)
LdapConnection connection = new LdapNetworkConnection(getHost(), getPort());
if (getCredentials() == null)
{
connection.bind(getDn());
}
else
{
connection.bind(getDn(), getCredentials());
}
return connection;
}
private void releaseConnection(LdapConnection connection)
{
//TODO release to pool (exists at the ldap client api, change later)
try
{
connection.close();
}
catch (Exception e)
{
e.printStackTrace();
}
}
private LdapEntity entity(Object state)
{
return state.getClass().getAnnotation(LdapEntity.class);
}
private String relativeDn(ActorReference reference, LdapEntity entity)
{
return entity.dnKey() + "=" + getIdentity(reference);
}
private String absoluteDn(ActorReference reference, LdapEntity entity)
{
return relativeDn(reference, entity) + ", " + entity.baseDn();
}
public String getDn()
{
return dn;
}
public void setDn(final String dn)
{
this.dn = dn;
}
public String getCredentials()
{
return credentials;
}
public void setCredentials(final String credentials)
{
this.credentials = credentials;
}
public String getHost()
{
return host;
}
public void setHost(final String host)
{
this.host = host;
}
public int getPort()
{
return port;
}
public void setPort(final int port)
{
this.port = port;
}
}
| actors/extensions/ldap/src/main/java/com/ea/orbit/actors/extensions/ldap/LdapStorageExtension.java | /*
Copyright (C) 2015 Electronic Arts Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Electronic Arts, Inc. ("EA") nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY ELECTRONIC ARTS AND ITS CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS OR ITS CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.ea.orbit.actors.extensions.ldap;
import com.ea.orbit.actors.extensions.AbstractStorageExtension;
import com.ea.orbit.actors.runtime.ActorReference;
import com.ea.orbit.concurrent.Task;
import com.ea.orbit.exception.UncheckedException;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.DefaultModification;
import org.apache.directory.api.ldap.model.entry.Modification;
import org.apache.directory.api.ldap.model.entry.ModificationOperation;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class LdapStorageExtension extends AbstractStorageExtension
{
private String dn;
private String credentials;
private String host = "localhost";
private int port = 10389;
@Override
public Task<Void> start()
{
return Task.done();
}
@Override
public Task<Void> stop()
{
return Task.done();
}
@Override
public Task<Void> clearState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
connection = acquireConnection();
connection.delete(absoluteDn(reference, entity(state)));
}
catch (Exception ignored)
{
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.done();
}
@Override
@SuppressWarnings("unchecked")
public Task<Boolean> readState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
LdapEntity entity = entity(state);
connection = acquireConnection();
EntryCursor cursor = connection.search(absoluteDn(reference, entity), "(objectclass=*)", SearchScope.OBJECT, "*");
if (cursor.next())
{
Map<String, Field> map = getFieldAttributeMap(state.getClass());
for (String key : map.keySet())
{
map.get(key).set(state, cursor.get().get(key).get().getString());
}
}
else
{
return Task.fromValue(false);
}
}
catch (Exception e)
{
throw new UncheckedException(e);
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.fromValue(true);
}
private Map<String, Field> getFieldAttributeMap(Class clazz)
{
//TODO this is cacheable
Map<String, Field> map = new HashMap<>();
List<Field> fields = new ArrayList<>();
for (Class c = clazz; c != null && c != Object.class; c = clazz.getSuperclass())
{
final Field[] declaredFields = c.getDeclaredFields();
if (declaredFields != null && declaredFields.length > 0)
{
for (Field f : declaredFields)
{
fields.add(f);
}
}
}
fields.stream().filter(f -> Modifier.isPublic(f.getModifiers()) || f.isAnnotationPresent(LdapAttribute.class)).forEach(f -> {
LdapAttribute attrann = f.getAnnotation(LdapAttribute.class);
if (attrann == null)
{
map.put(f.getName(), f);
}
else
{
map.put(attrann.value() == null ? f.getName() : attrann.value(), f);
}
});
return map;
}
@Override
@SuppressWarnings("unchecked")
public Task<Void> writeState(final ActorReference reference, final Object state)
{
LdapConnection connection = null;
try
{
LdapEntity entity = entity(state);
connection = acquireConnection();
List<String> attributes = new ArrayList(Arrays.asList(entity.attributes()));
Map<String, Field> map = getFieldAttributeMap(state.getClass());
for (String key : map.keySet())
{
attributes.add(key + ": " + map.get(key).get(state).toString());
}
EntryCursor cursor = connection.search(absoluteDn(reference, entity), "(objectclass=*)", SearchScope.OBJECT, "*");
if (cursor.next())
{
List<Modification> modifications = new ArrayList<>();
for (String tmp : map.keySet())
{
String value = (String) map.get(tmp).get(state);
Modification mod = new DefaultModification(ModificationOperation.REPLACE_ATTRIBUTE, tmp, value);
modifications.add(mod);
}
connection.modify(cursor.get().getDn(), modifications.toArray(new Modification[0]));
}
else
{
connection.add(new DefaultEntry(absoluteDn(reference, entity), attributes.toArray()));
}
}
catch (Exception e)
{
throw new UncheckedException(e);
}
finally
{
if (connection != null)
{
releaseConnection(connection);
}
}
return Task.done();
}
private LdapConnection acquireConnection() throws LdapException, IOException
{
//TODO acquire from pool (exists at the ldap client api, change later)
LdapConnection connection = new LdapNetworkConnection(getHost(), getPort());
if (getCredentials() == null)
{
connection.bind(getDn());
}
else
{
connection.bind(getDn(), getCredentials());
}
return connection;
}
private void releaseConnection(LdapConnection connection)
{
//TODO release to pool (exists at the ldap client api, change later)
try
{
connection.close();
}
catch (Exception e)
{
e.printStackTrace();
}
}
private LdapEntity entity(Object state)
{
return state.getClass().getAnnotation(LdapEntity.class);
}
private String relativeDn(ActorReference reference, LdapEntity entity)
{
return entity.dnKey() + "=" + getIdentity(reference);
}
private String absoluteDn(ActorReference reference, LdapEntity entity)
{
return relativeDn(reference, entity) + ", " + entity.baseDn();
}
public String getDn()
{
return dn;
}
public void setDn(final String dn)
{
this.dn = dn;
}
public String getCredentials()
{
return credentials;
}
public void setCredentials(final String credentials)
{
this.credentials = credentials;
}
public String getHost()
{
return host;
}
public void setHost(final String host)
{
this.host = host;
}
public int getPort()
{
return port;
}
public void setPort(final int port)
{
this.port = port;
}
}
| Fixing ldap get fields.
| actors/extensions/ldap/src/main/java/com/ea/orbit/actors/extensions/ldap/LdapStorageExtension.java | Fixing ldap get fields. | <ide><path>ctors/extensions/ldap/src/main/java/com/ea/orbit/actors/extensions/ldap/LdapStorageExtension.java
<ide> Map<String, Field> map = new HashMap<>();
<ide>
<ide> List<Field> fields = new ArrayList<>();
<del> for (Class c = clazz; c != null && c != Object.class; c = clazz.getSuperclass())
<add> for (Class c = clazz; c != null && c != Object.class; c = c.getSuperclass())
<ide> {
<ide> final Field[] declaredFields = c.getDeclaredFields();
<ide> if (declaredFields != null && declaredFields.length > 0)
<ide> try
<ide> {
<ide> LdapEntity entity = entity(state);
<del> connection = acquireConnection();
<ide>
<ide> List<String> attributes = new ArrayList(Arrays.asList(entity.attributes()));
<ide> Map<String, Field> map = getFieldAttributeMap(state.getClass());
<ide> attributes.add(key + ": " + map.get(key).get(state).toString());
<ide> }
<ide>
<add> connection = acquireConnection();
<ide> EntryCursor cursor = connection.search(absoluteDn(reference, entity), "(objectclass=*)", SearchScope.OBJECT, "*");
<ide> if (cursor.next())
<ide> { |
|
JavaScript | mit | 15c9f2976864250a9ac7b8759d81d7b50be71031 | 0 | tyanas/basisjs,fateevv/basisjs,fateevv/basisjs,fateevv/basisjs,tyanas/basisjs,smelukov/basisjs,basisjs/basisjs,istrel/basisjs,basisjs/basisjs,basisjs/basisjs,smelukov/basisjs,istrel/basisjs,istrel/basisjs,tyanas/basisjs,smelukov/basisjs |
basis.require('basis.event');
basis.require('basis.dom.event');
basis.require('basis.cssom');
basis.require('basis.ui');
basis.require('basis.animation');
/**
* @see ./demo/defile/scroller.html
* @namespace basis.ui.scroller
*/
var namespace = this.path;
//
// import names
//
var Event = basis.dom.event;
var cssom = basis.cssom;
var anim = basis.animation;
var createEvent = basis.event.create;
var Emitter = basis.event.Emitter;
var UINode = basis.ui.Node;
//
// definitions
//
var templates = basis.template.define(namespace, {
Scrollbar: resource('templates/scroller/Scrollbar.tmpl'),
ScrollPanel: resource('templates/scroller/ScrollPanel.tmpl'),
ScrollGalleryItem: resource('templates/scroller/ScrollGalleryItem.tmpl')
});
//
// Main part
//
// constants
var AVERAGE_TICK_TIME_INTERVAL = 15;
var VELOCITY_DECREASE_FACTOR = 0.94;
var MOVE_THRESHOLD = 5;
// css transform/transform3d feature detection
var TRANSFORM_SUPPORT = false;
var TRANSFORM_3D_SUPPORT = false;
var TRANSFORM_PROPERTY_NAME;
(function (){
var style = document.body.style;
function testProps(properties){
for (var i = 0, propertyName; propertyName = properties[i]; i++)
if (typeof style[propertyName] != 'undefined')
return propertyName;
return false;
}
TRANSFORM_PROPERTY_NAME = testProps([
'transform',
'WebkitTransform',
'msTransform',
'MozTransform',
'OTransform'
]);
if (TRANSFORM_PROPERTY_NAME)
TRANSFORM_SUPPORT = true;
//transform3d
if (TRANSFORM_SUPPORT)
{
var prop = testProps([
'perspectiveProperty',
'WebkitPerspective',
'MozPerspective',
'OPerspective',
'msPerspective'
]);
if (prop || 'webkitPerspective' in style)
TRANSFORM_3D_SUPPORT = true;
}
})();
/**
* @class
*/
var Scroller = Emitter.subclass({
className: namespace + '.Scroller',
minScrollDelta: 0,
scrollX: true,
scrollY: true,
event_start: createEvent('start'),
event_finish: createEvent('finish'),
event_startInertia: createEvent('startInertia'),
event_updatePosition: createEvent('updatePosition', 'scrollPosition'),
init: function(){
this.lastMouseX = 0;
this.lastMouseY = 0;
this.currentDirectionX = 0;
this.currentDirectionY = 0;
this.currentVelocityX = 0;
this.currentVelocityY = 0;
this.viewportX = 0;
this.viewportY = 0;
this.viewportTargetX = this.viewportX;
this.viewportTargetY = this.viewportY;
//this.lastViewportTargetX = this.viewportX;
//this.lastViewportTargetY = this.viewportY;
if (this.minScrollDelta == 0)
{
this.minScrollDeltaYReached = true;
this.minScrollDeltaXReached = true;
}
//time
this.updateFrameHandle = 0;
this.lastMotionUpdateTime = 0;
this.lastUpdateTime = 0;
this.startTime = 0;
//statuses
this.processInertia = false;
this.panningActive = false;
//init
Emitter.prototype.init.call(this);
var element = this.targetElement;
this.targetElement = null;
this.setElement(element);
this.onUpdateHandler = this.onUpdate.bind(this);
this.updateElementPosition = TRANSFORM_SUPPORT ? this.updatePosition_styleTransform : this.updatePosition_styleTopLeft;
},
setElement: function(element){
if (this.targetElement)
{
Event.removeHandler(this.targetElement, 'mousedown', this.onMouseDown, this);
Event.removeHandler(this.targetElement, 'touchstart', this.onMouseDown, this);
}
this.targetElement = element;
if (this.targetElement)
{
Event.addHandler(this.targetElement, 'mousedown', this.onMouseDown, this);
Event.addHandler(this.targetElement, 'touchstart', this.onMouseDown, this);
}
},
updatePosition_styleTopLeft: function(){
if (this.scrollX)
this.targetElement.style.left = -(this.viewportX) + 'px';
if (this.scrollY)
this.targetElement.style.top = -(this.viewportY) + 'px';
},
updatePosition_styleTransform: function(){
var deltaX = -(this.isUpdating ? this.viewportX : Math.round(this.viewportX)) + 'px';
var deltaY = -(this.isUpdating ? this.viewportY : Math.round(this.viewportY)) + 'px';
this.targetElement.style[TRANSFORM_PROPERTY_NAME] = 'translate(' + deltaX + ', ' + deltaY + ')' + (TRANSFORM_3D_SUPPORT ? ' translateZ(0)' : '');
},
resetVariables: function(){
this.viewportTargetX = this.viewportX;
this.viewportTargetY = this.viewportY;
//this.lastViewportTargetX = this.viewportTargetX;
//this.lastViewportTargetY = this.viewportTargetY;
this.startX = this.viewportX;
this.startY = this.viewportY;
this.currentVelocityX = 0;
this.currentVelocityY = 0;
this.currentDirectionX = 0;
this.currentDirectionY = 0;
if (this.minScrollDelta != 0)
{
this.minScrollDeltaXReached = false;
this.minScrollDeltaYReached = false;
}
this.processInertia = false;
},
startUpdate: function(){
if (this.isUpdating)
return;
this.isUpdating = true;
this.lastUpdateTime = Date.now();
this.updateFrameHandle = this.nextFrame();
this.event_start();
},
stopUpdate: function(){
if (!this.isUpdating)
return;
this.resetVariables();
this.isUpdating = false;
anim.cancelAnimationFrame(this.updateFrameHandle);
this.updateElementPosition();
this.event_finish();
},
onMouseDown: function(event){
this.stopUpdate();
this.panningActive = true;
this.isMoved = false;
this.lastMouseX = Event.mouseX(event);
this.lastMouseY = Event.mouseY(event);
this.lastMotionUpdateTime = Date.now();
Event.addGlobalHandler('mousemove', this.onMouseMove, this);
Event.addGlobalHandler('touchmove', this.onMouseMove, this);
Event.addGlobalHandler('mouseup', this.onMouseUp, this);
Event.addGlobalHandler('touchend', this.onMouseUp, this);
//Event.cancelBubble(event);
Event.cancelDefault(event);
},
onMouseMove: function(event){
if (this.minScrollDelta == 0 || this.minScrollDeltaYReached || this.minScrollDeltaXReached)
{
this.startUpdate();
}
var time = Date.now();
var deltaTime = time - this.lastMotionUpdateTime;
this.lastMotionUpdateTime = time;
if (!deltaTime)
return;
if (this.minScrollDeltaXReached || !this.minScrollDeltaYReached)
{
var curMouseX = Event.mouseX(event);
var deltaX = this.lastMouseX - curMouseX;
this.lastMouseX = curMouseX;
this.viewportTargetX += deltaX;
if (!this.isMoved && Math.abs(this.startX - this.viewportTargetX) > MOVE_THRESHOLD)
this.isMoved = true;
}
if (this.minScrollDeltaYReached || !this.minScrollDeltaXReached)
{
var curMouseY = Event.mouseY(event);
var deltaY = this.lastMouseY - curMouseY;
this.lastMouseY = curMouseY;
this.viewportTargetY += deltaY;
if (!this.isMoved && Math.abs(this.startY - this.viewportTargetY) > MOVE_THRESHOLD)
this.isMoved = true;
}
if (this.minScrollDelta > 0)
{
if (!this.minScrollDeltaXReached && !this.minScrollDeltaYReached)
{
if (Math.abs(this.viewportTargetX - this.viewportX) > this.minScrollDelta)
this.minScrollDeltaXReached = true;
if (Math.abs(this.viewportTargetY - this.viewportY) > this.minScrollDelta)
this.minScrollDeltaYReached = true;
if (this.minScrollDeltaYReached)
{
this.viewportTargetX = this.viewportX;
this.currentDirectionX = 0;
}
if (this.minScrollDeltaXReached)
{
this.viewportTargetY = this.viewportY;
this.currentDirectionY = 0;
}
}
}
Event.cancelDefault(event);
},
onMouseUp: function(){
this.panningActive = false;
this.processInertia = true;
var timeNow = Date.now();
var deltaTime = timeNow - this.lastMotionUpdateTime;
deltaTime = Math.max(10, deltaTime); // low-timer granularity compensation
this.lastMotionUpdateTime = 0;
if (this.scrollX)
{
// 100msec is a full hold gesture that complete zeroes out the velocity to be used as inertia
this.currentVelocityX *= 1 - Math.min(1, Math.max(0, deltaTime / 100));
}
if (this.scrollY)
this.currentVelocityY *= 1 - Math.min(1, Math.max(0, deltaTime / 100));
Event.removeGlobalHandler('mousemove', this.onMouseMove, this);
Event.removeGlobalHandler('touchmove', this.onMouseMove, this);
Event.removeGlobalHandler('mouseup', this.onMouseUp, this);
Event.removeGlobalHandler('touchend', this.onMouseUp, this);
this.event_startInertia();
},
onUpdate: function(time){
if (!time)
time = Date.now();
var deltaTime = time - this.lastUpdateTime;
this.lastUpdateTime = time;
if (!deltaTime)
{
this.nextFrame();
return;
}
if (this.panningActive)
{
var delta;
if (this.scrollX)
{
delta = this.viewportTargetX - this.viewportX;
if (delta)
{
this.currentVelocityX = delta / deltaTime;
this.currentDirectionX = delta == 0 ? 0 : (delta < 0 ? -1 : 1);
}
}
if (this.scrollY)
{
delta = this.viewportTargetY - this.viewportY;
if (delta)
{
this.currentVelocityY = delta / deltaTime;
this.currentDirectionY = delta == 0 ? 0 : (delta < 0 ? -1 : 1);
}
}
}
else if (this.processInertia)
{
if (this.scrollX)
{
this.viewportTargetX += (this.currentVelocityX * deltaTime);
this.currentVelocityX *= VELOCITY_DECREASE_FACTOR;
}
if (this.scrollY)
{
this.viewportTargetY += (this.currentVelocityY * deltaTime);
this.currentVelocityY *= VELOCITY_DECREASE_FACTOR;
}
}
var deltaX = 0;
var deltaY = 0;
if (this.scrollX)
{
deltaX = (this.viewportTargetX - this.viewportX);
var smoothingFactorX = this.panningActive || Math.abs(this.currentVelocityX) > 0 ? 1 : 0.12;
this.viewportX += deltaX * smoothingFactorX;
}
if (this.scrollY)
{
deltaY = (this.viewportTargetY - this.viewportY);
var smoothingFactorY = this.panningActive || Math.abs(this.currentVelocityY) > 0 ? 1 : 0.12;
this.viewportY += deltaY * smoothingFactorY;
}
var scrollXStop = !this.scrollX || (/*this.currentVelocityX < 0.01 &&*/ Math.abs(deltaX) < 0.5);
var scrollYStop = !this.scrollY || (/*this.currentVelocityY < 0.01 &&*/ Math.abs(deltaY) < 0.5);
if (!this.panningActive && scrollXStop && scrollYStop)
{
if (this.scrollX)
this.viewportX = this.viewportTargetX;
if (this.scrollY)
this.viewportY = this.viewportTargetY;
this.stopUpdate();
}
this.updateElementPosition();
this.event_updatePosition(time, this.viewportX, this.viewportY);
this.nextFrame();
},
nextFrame: function(){
if (this.isUpdating)
this.updateFrameHandle = anim.requestAnimationFrame(this.onUpdateHandler, this.targetElement);
},
setPosition: function(positionX, positionY, instantly){
this.setPositionX(positionX, !instantly);
this.setPositionY(positionY, !instantly);
},
setPositionX: function(positionX, smooth){
if (smooth)
{
this.viewportTargetX = positionX || 0;
this.currentVelocityX = 0;
this.startUpdate();
}
else
{
this.stopUpdate();
this.viewportX = positionX;
this.viewportTargetX = positionX;
this.updateElementPosition();
}
},
setPositionY: function(positionY, smooth){
if (smooth)
{
this.viewportTargetY = positionY || 0;
this.currentVelocityY = 0;
this.startUpdate();
}
else
{
this.stopUpdate();
this.viewportY = positionY;
this.viewportTargetY = positionY;
this.updateElementPosition();
}
},
getCurrentDirection: function(axis){
return axis == 'x' ? this.currentDirectionX : this.currentDirectionY;
},
calcExpectedPosition: function(axis){
var expectedInertiaDelta = 0;
var currentVelocity = axis == 'x' ? this.currentVelocityX : this.currentVelocityY;
var viewportTargetPosition = axis == 'x' ? this.viewportTargetX : this.viewportTargetY;
if (currentVelocity)
{
var expectedInertiaIterationCount = Math.log(0.001 / Math.abs(currentVelocity)) / Math.log(VELOCITY_DECREASE_FACTOR);
var velocity = currentVelocity;
for (var i = 0; i < expectedInertiaIterationCount; i++)
{
expectedInertiaDelta += velocity * AVERAGE_TICK_TIME_INTERVAL;
velocity *= VELOCITY_DECREASE_FACTOR;
}
}
// return expected position
return viewportTargetPosition + expectedInertiaDelta;
},/*,
calcExpectedPositionX: function(){
return this.calcExpectedPosition('x');
},
calcExpectedPositionY: function(){
return this.calcExpectedPosition('y');
}*/
destroy: function(){
this.setElement();
Emitter.prototype.destroy.call(this);
}
});
/**
* @class
*/
var Scrollbar = UINode.subclass({
className: namespace + '.Scrollbar',
orientation: '',
template: templates.Scrollbar,
binding: {
orientation: 'orientation'
},
listen: {
owner: {
realign: function(){
this.realign();
},
updatePosition: function(){
if (!this.trackSize)
this.realign();
var scrollPosition = this.getScrollbarPosition();
if (scrollPosition > 1)
scrollPosition = 1 + (scrollPosition - 1) * 3;
if (scrollPosition < 0)
scrollPosition *= 3;
var startPosition = Math.max(0, Math.min(this.trackSize * scrollPosition, this.scrollbarSize - 4));
var endPosition = Math.max(0, Math.min(this.trackSize - this.trackSize * scrollPosition, this.scrollbarSize - 4));
var style = {};
style[this.startProperty] = startPosition + 'px';
style[this.endProperty] = endPosition + 'px';
cssom.setStyle(this.tmpl.trackElement, style);
}
}
},
realign: function(){
this.scrollbarSize = this.getScrollbarSize();
this.trackSize = this.scrollbarSize - this.scrollbarSize * this.getScrollbarPart();
},
getScrollbarSize: basis.fn.$null,
getScrollbarPart: basis.fn.$null,
getScrollbarPosition: basis.fn.$null
});
/**
* @class
*/
var HorizontalScrollbar = Scrollbar.subclass({
className: namespace + '.HorizontalScrollbar',
orientation: 'horizontal',
startProperty: 'left',
endProperty: 'right',
getScrollbarSize: function(){
return this.element.offsetWidth;
},
getScrollbarPart: function(){
return this.owner.element.offsetWidth / (this.owner.maxPositionX - this.owner.minPositionX + this.owner.element.offsetWidth);
},
getScrollbarPosition: function(){
return (this.owner.scroller.viewportX - this.owner.minPositionX) / (this.owner.maxPositionX - this.owner.minPositionX);
}
});
/**
* @class
*/
var VerticalScrollbar = Scrollbar.subclass({
className: namespace + '.VerticalScrollbar',
orientation: 'vertical',
startProperty: 'top',
endProperty: 'bottom',
getScrollbarSize: function(){
return this.element.offsetHeight;
},
getScrollbarPart: function(){
return this.owner.element.offsetHeight / (this.owner.maxPositionY - this.owner.minPositionY + this.owner.element.offsetHeight);
},
getScrollbarPosition: function(){
return (this.owner.scroller.viewportY - this.owner.minPositionY) / (this.owner.maxPositionY - this.owner.minPositionY);
}
});
//
// Scroller
//
/**
* @class
*/
var ScrollPanel = UINode.subclass({
className: namespace + '.ScrollPanel',
useScrollbars: true,
scrollX: true,
scrollY: true,
wheelDelta: 40,
event_realign: createEvent('realign'),
event_updatePosition: createEvent('updatePosition'),
template: templates.ScrollPanel,
binding: {
horizontalScrollbar: 'satellite:',
verticalScrollbar: 'satellite:',
bothScrollbars: function(node){
return node.scrollX && node.scrollY ? 'bothScrollbars' : '';
},
scrollProcess: function(node){
return node.scroller && node.scroller.isUpdating ? 'scrollProcess' : '';
}
},
action: {
onwheel: function(event){
var delta = Event.wheelDelta(event);
if (this.scrollY)
this.scroller.setPositionY(this.scroller.viewportTargetY - this.wheelDelta * delta, true);
else if (this.scrollX)
this.scroller.setPositionX(this.scroller.viewportTargetX - this.wheelDelta * delta, true);
Event.kill(event);
}
},
satelliteConfig: {
horizontalScrollbar: {
instanceOf: HorizontalScrollbar,
existsIf: function(object){
return object.useScrollbars && object.scrollX;
}
},
verticalScrollbar: {
instanceOf: VerticalScrollbar,
existsIf: function(object){
return object.useScrollbars && object.scrollY;
}
}
},
init: function(){
UINode.prototype.init.call(this);
//init variables
this.minPositionX = 0;
this.minPositionY = 0;
this.maxPositionX = 0;
this.maxPositionY = 0;
// create scroller
var scrollerConfig = basis.object.extend(this.scroller || {}, {
targetElement: this.tmpl.scrollElement,
scrollX: this.scrollX,
scrollY: this.scrollY
});
this.scroller = new Scroller(scrollerConfig);
this.scroller.addHandler({
updatePosition: this.updatePosition,
start: function(){
if (!this.maxPositionX && !this.maxPositionY)
this.realign();
this.updateBind('scrollProcess');
},
finish: function(){
this.updateBind('scrollProcess');
}
}, this);
// add resize handler
basis.layout.addBlockResizeHandler(this.tmpl.scrollElement, this.realign.bind(this));
},
templateSync: function(noRecreate){
UINode.prototype.templateSync.call(this, noRecreate);
this.scroller.setElement(this.tmpl.scrollElement);
},
updatePosition: function(){
if (!this.scroller.panningActive)
this.fixPosition();
this.event_updatePosition();
},
fixPosition: function(){
var scroller = this.scroller;
if (this.scrollX && (scroller.viewportX < this.minPositionX || scroller.viewportX > this.maxPositionX))
{
var positionX = Math.min(this.maxPositionX, Math.max(this.minPositionX, scroller.viewportX));
scroller.setPositionX(positionX, true);
}
if (this.scrollY && (scroller.viewportY < this.minPositionY || scroller.viewportY > this.maxPositionY))
{
var positionY = Math.min(this.maxPositionY, Math.max(this.minPositionY, scroller.viewportY));
scroller.setPositionY(positionY, true);
}
},
realign: function(){
if (this.element.offsetWidth)
{
this.calcDimensions();
this.updatePosition();
this.event_realign();
}
},
calcDimensions: function(){
if (this.scrollX)
{
var containerWidth = this.element.offsetWidth;
var scrollWidth = this.tmpl.scrollElement.scrollWidth;
this.maxPositionX = Math.max(0, scrollWidth - containerWidth);
}
if (this.scrollY)
{
var containerHeight = this.element.offsetHeight;
var scrollHeight = this.tmpl.scrollElement.scrollHeight;
this.maxPositionY = Math.max(0, scrollHeight - containerHeight);
}
},
destroy: function(){
this.scroller.destroy();
this.scroller = null;
UINode.prototype.destroy.call(this);
}
});
/**
* @class
*/
var ScrollGallery = ScrollPanel.subclass({
className: namespace + '.ScrollGallery',
scrollX: false,
scrollY: false,
childTransform: basis.fn.$null,
selection: true,
action: {
onwheel: function(event){
var delta = Event.wheelDelta(event);
var selected = this.selection.pick();
var nextChild = delta == -1 ? selected.nextSibling : selected.previousSibling;
if (nextChild)
nextChild.select();
Event.kill(event);
}
},
event_childNodesModified: function(delta){
ScrollPanel.prototype.event_childNodesModified.call(this, delta);
if (this.scroller && this.childNodes.length == delta.inserted.length)
{
this.scrollToChild(this.firstChild, true);
this.firstChild.select();
}
},
childClass: UINode.subclass({
className: namespace + '.ScrollGalleryItem',
template: templates.ScrollGalleryItem,
action: {
select: function(){
if (!this.parentNode.scroller.isMoved)
this.select();
}
},
event_select: function(){
UINode.prototype.event_select.call(this);
this.parentNode.scrollToChild(this);
}
}),
init: function(){
ScrollPanel.prototype.init.call(this);
this.scroller.addHandler({
startInertia: this.adjustPosition
}, this);
if (this.childTransform != basis.fn.$null)
{
this.scroller.addHandler({
updatePosition: this.applyPosition
}, this);
}
if (!this.selection.itemCount && this.firstChild)
{
this.firstChild.select();
this.scrollToChild(this.firstChild, true);
}
},
setPosition: function(position, instantly){
if (this.scrollX)
this.scroller.setPositionX(position, !instantly);
else
this.scroller.setPositionY(position, !instantly);
},
adjustPosition: function(){
var childSize = this.scrollX ? this.firstChild.element.offsetWidth : this.firstChild.element.offsetHeight;
var startPosition = (this.scrollX ? this.element.offsetWidth : this.element.offsetHeight) / 2;
var newPosition = startPosition - childSize / 2 + this.calcExpectedPosition();
var childScrollTo = Math.max(0, Math.min(this.childNodes.length - 1, Math.round(newPosition / childSize)));
this.scrollToChild(this.childNodes[childScrollTo]);
},
applyPosition: function(){
var childSize = this.scrollX ? this.firstChild.element.offsetWidth : this.firstChild.element.offsetHeight;
var startPosition = this.scrollX ? this.element.offsetWidth / 2 : this.element.offsetHeight / 2;
var newPosition = startPosition - childSize / 2 + (this.scroller.viewportX || this.scroller.viewportY);
var closestChildPos = Math.floor(newPosition / childSize);
var offset = newPosition / childSize - closestChildPos;
var closeness;
for (var i = 0, child; child = this.childNodes[i]; i++)
{
closeness = i == closestChildPos ? 1 - offset : (i == closestChildPos + 1 ? offset : 0);
this.childTransform(child, closeness);
}
},
scrollToChild: function(child, instantly){
var startPosition = this.scrollX ? this.element.offsetWidth / 2 : this.element.offsetHeight / 2;
var childPosition = this.scrollX ? child.element.offsetLeft : child.element.offsetTop;
var childSize = this.scrollX ? child.element.offsetWidth : child.element.offsetHeight;
this.setPosition(childPosition + childSize / 2 - startPosition, instantly);
},
calcDimensions: function(){
ScrollPanel.prototype.calcDimensions.call(this);
if (this.scrollX)
{
this.minPositionX = (this.firstChild ? this.firstChild.element.offsetWidth / 2 : 0) - this.element.offsetWidth / 2;
this.maxPositionX = this.maxPositionX + this.element.offsetWidth / 2 - (this.lastChild ? this.lastChild.element.offsetWidth / 2 : 0);
}
if (this.scrollY)
{
this.minPositionY = (this.firstChild ? this.firstChild.element.offsetHeight / 2 : 0) - this.element.offsetHeight / 2;
this.maxPositionY = this.maxPositionY + this.element.offsetHeight / 2 - (this.lastChild ? this.lastChild.element.offsetHeight / 2 : 0);
}
},
calcExpectedPosition: function(){
return this.scroller.calcExpectedPosition(this.scrollX ? 'x' : 'y');
}
});
//
// export names
//
module.exports = {
Scroller: Scroller,
Scrollbar: Scrollbar,
ScrollPanel: ScrollPanel,
ScrollGallery: ScrollGallery
};
| src/basis/ui/scroller.js |
basis.require('basis.event');
basis.require('basis.dom');
basis.require('basis.dom.event');
basis.require('basis.cssom');
basis.require('basis.ui');
basis.require('basis.animation');
/**
* @see ./demo/defile/scroller.html
* @namespace basis.ui.scroller
*/
var namespace = this.path;
//
// import names
//
var DOM = basis.dom;
var Event = basis.dom.event;
var cssom = basis.cssom;
var anim = basis.animation;
var createEvent = basis.event.create;
var Emitter = basis.event.Emitter;
var UINode = basis.ui.Node;
//
// definitions
//
var templates = basis.template.define(namespace, {
Scrollbar: resource('templates/scroller/Scrollbar.tmpl'),
ScrollPanel: resource('templates/scroller/ScrollPanel.tmpl'),
ScrollGalleryItem: resource('templates/scroller/ScrollGalleryItem.tmpl')
});
//
// Main part
//
// constants
var AVERAGE_TICK_TIME_INTERVAL = 15;
var VELOCITY_DECREASE_FACTOR = 0.94;
var MOVE_THRESHOLD = 5;
// css transform/transform3d feature detection
var TRANSFORM_SUPPORT = false;
var TRANSFORM_3D_SUPPORT = false;
var TRANSFORM_PROPERTY_NAME;
(function (){
var testElement = DOM.createElement('');
function testProps(properties){
for (var i = 0, propertyName; propertyName = properties[i]; i++)
if (typeof testElement.style[propertyName] != 'undefined')
return propertyName;
return false;
}
TRANSFORM_PROPERTY_NAME = testProps([
'transform',
'WebkitTransform',
'msTransform',
'MozTransform',
'OTransform'
]);
if (TRANSFORM_PROPERTY_NAME)
TRANSFORM_SUPPORT = true;
//transform3d
if (TRANSFORM_SUPPORT)
{
var prop = testProps([
'perspectiveProperty',
'WebkitPerspective',
'MozPerspective',
'OPerspective',
'msPerspective'
]);
if (prop || 'webkitPerspective' in testElement.style)
TRANSFORM_3D_SUPPORT = true;
}
})();
/**
* @class
*/
var Scroller = Emitter.subclass({
className: namespace + '.Scroller',
minScrollDelta: 0,
scrollX: true,
scrollY: true,
event_start: createEvent('start'),
event_finish: createEvent('finish'),
event_startInertia: createEvent('startInertia'),
event_updatePosition: createEvent('updatePosition', 'scrollPosition'),
init: function(){
this.lastMouseX = 0;
this.lastMouseY = 0;
this.currentDirectionX = 0;
this.currentDirectionY = 0;
this.currentVelocityX = 0;
this.currentVelocityY = 0;
this.viewportX = 0;
this.viewportY = 0;
this.viewportTargetX = this.viewportX;
this.viewportTargetY = this.viewportY;
//this.lastViewportTargetX = this.viewportX;
//this.lastViewportTargetY = this.viewportY;
if (this.minScrollDelta == 0)
{
this.minScrollDeltaYReached = true;
this.minScrollDeltaXReached = true;
}
//time
this.updateFrameHandle = 0;
this.lastMotionUpdateTime = 0;
this.lastUpdateTime = 0;
this.startTime = 0;
//statuses
this.processInertia = false;
this.panningActive = false;
//init
Emitter.prototype.init.call(this);
var element = this.targetElement;
this.targetElement = null;
this.setElement(element);
this.onUpdateHandler = this.onUpdate.bind(this);
this.updateElementPosition = TRANSFORM_SUPPORT ? this.updatePosition_styleTransform : this.updatePosition_styleTopLeft;
},
setElement: function(element){
if (this.targetElement)
{
Event.removeHandler(this.targetElement, 'mousedown', this.onMouseDown, this);
Event.removeHandler(this.targetElement, 'touchstart', this.onMouseDown, this);
}
this.targetElement = element;
if (this.targetElement)
{
Event.addHandler(this.targetElement, 'mousedown', this.onMouseDown, this);
Event.addHandler(this.targetElement, 'touchstart', this.onMouseDown, this);
}
},
updatePosition_styleTopLeft: function(){
if (this.scrollX)
this.targetElement.style.left = -(this.viewportX) + 'px';
if (this.scrollY)
this.targetElement.style.top = -(this.viewportY) + 'px';
},
updatePosition_styleTransform: function(){
var deltaX = -(this.isUpdating ? this.viewportX : Math.round(this.viewportX)) + 'px';
var deltaY = -(this.isUpdating ? this.viewportY : Math.round(this.viewportY)) + 'px';
this.targetElement.style[TRANSFORM_PROPERTY_NAME] = 'translate(' + deltaX + ', ' + deltaY + ')' + (TRANSFORM_3D_SUPPORT ? ' translateZ(0)' : '');
},
resetVariables: function(){
this.viewportTargetX = this.viewportX;
this.viewportTargetY = this.viewportY;
//this.lastViewportTargetX = this.viewportTargetX;
//this.lastViewportTargetY = this.viewportTargetY;
this.startX = this.viewportX;
this.startY = this.viewportY;
this.currentVelocityX = 0;
this.currentVelocityY = 0;
this.currentDirectionX = 0;
this.currentDirectionY = 0;
if (this.minScrollDelta != 0)
{
this.minScrollDeltaXReached = false;
this.minScrollDeltaYReached = false;
}
this.processInertia = false;
},
startUpdate: function(){
if (this.isUpdating)
return;
this.isUpdating = true;
this.lastUpdateTime = Date.now();
this.updateFrameHandle = this.nextFrame();
this.event_start();
},
stopUpdate: function(){
if (!this.isUpdating)
return;
this.resetVariables();
this.isUpdating = false;
anim.cancelAnimationFrame(this.updateFrameHandle);
this.updateElementPosition();
this.event_finish();
},
onMouseDown: function(event){
this.stopUpdate();
this.panningActive = true;
this.isMoved = false;
this.lastMouseX = Event.mouseX(event);
this.lastMouseY = Event.mouseY(event);
this.lastMotionUpdateTime = Date.now();
Event.addGlobalHandler('mousemove', this.onMouseMove, this);
Event.addGlobalHandler('touchmove', this.onMouseMove, this);
Event.addGlobalHandler('mouseup', this.onMouseUp, this);
Event.addGlobalHandler('touchend', this.onMouseUp, this);
//Event.cancelBubble(event);
Event.cancelDefault(event);
},
onMouseMove: function(event){
if (this.minScrollDelta == 0 || this.minScrollDeltaYReached || this.minScrollDeltaXReached)
{
this.startUpdate();
}
var time = Date.now();
var deltaTime = time - this.lastMotionUpdateTime;
this.lastMotionUpdateTime = time;
if (!deltaTime)
return;
if (this.minScrollDeltaXReached || !this.minScrollDeltaYReached)
{
var curMouseX = Event.mouseX(event);
var deltaX = this.lastMouseX - curMouseX;
this.lastMouseX = curMouseX;
this.viewportTargetX += deltaX;
if (!this.isMoved && Math.abs(this.startX - this.viewportTargetX) > MOVE_THRESHOLD)
this.isMoved = true;
}
if (this.minScrollDeltaYReached || !this.minScrollDeltaXReached)
{
var curMouseY = Event.mouseY(event);
var deltaY = this.lastMouseY - curMouseY;
this.lastMouseY = curMouseY;
this.viewportTargetY += deltaY;
if (!this.isMoved && Math.abs(this.startY - this.viewportTargetY) > MOVE_THRESHOLD)
this.isMoved = true;
}
if (this.minScrollDelta > 0)
{
if (!this.minScrollDeltaXReached && !this.minScrollDeltaYReached)
{
if (Math.abs(this.viewportTargetX - this.viewportX) > this.minScrollDelta)
this.minScrollDeltaXReached = true;
if (Math.abs(this.viewportTargetY - this.viewportY) > this.minScrollDelta)
this.minScrollDeltaYReached = true;
if (this.minScrollDeltaYReached)
{
this.viewportTargetX = this.viewportX;
this.currentDirectionX = 0;
}
if (this.minScrollDeltaXReached)
{
this.viewportTargetY = this.viewportY;
this.currentDirectionY = 0;
}
}
}
Event.cancelDefault(event);
},
onMouseUp: function(){
this.panningActive = false;
this.processInertia = true;
var timeNow = Date.now();
var deltaTime = timeNow - this.lastMotionUpdateTime;
deltaTime = Math.max(10, deltaTime); // low-timer granularity compensation
this.lastMotionUpdateTime = 0;
if (this.scrollX)
{
// 100msec is a full hold gesture that complete zeroes out the velocity to be used as inertia
this.currentVelocityX *= 1 - Math.min(1, Math.max(0, deltaTime / 100));
}
if (this.scrollY)
this.currentVelocityY *= 1 - Math.min(1, Math.max(0, deltaTime / 100));
Event.removeGlobalHandler('mousemove', this.onMouseMove, this);
Event.removeGlobalHandler('touchmove', this.onMouseMove, this);
Event.removeGlobalHandler('mouseup', this.onMouseUp, this);
Event.removeGlobalHandler('touchend', this.onMouseUp, this);
this.event_startInertia();
},
onUpdate: function(time){
if (!time)
time = Date.now();
var deltaTime = time - this.lastUpdateTime;
this.lastUpdateTime = time;
if (!deltaTime)
{
this.nextFrame();
return;
}
if (this.panningActive)
{
var delta;
if (this.scrollX)
{
delta = this.viewportTargetX - this.viewportX;
if (delta)
{
this.currentVelocityX = delta / deltaTime;
this.currentDirectionX = delta == 0 ? 0 : (delta < 0 ? -1 : 1);
}
}
if (this.scrollY)
{
delta = this.viewportTargetY - this.viewportY;
if (delta)
{
this.currentVelocityY = delta / deltaTime;
this.currentDirectionY = delta == 0 ? 0 : (delta < 0 ? -1 : 1);
}
}
}
else if (this.processInertia)
{
if (this.scrollX)
{
this.viewportTargetX += (this.currentVelocityX * deltaTime);
this.currentVelocityX *= VELOCITY_DECREASE_FACTOR;
}
if (this.scrollY)
{
this.viewportTargetY += (this.currentVelocityY * deltaTime);
this.currentVelocityY *= VELOCITY_DECREASE_FACTOR;
}
}
var deltaX = 0;
var deltaY = 0;
if (this.scrollX)
{
deltaX = (this.viewportTargetX - this.viewportX);
var smoothingFactorX = this.panningActive || Math.abs(this.currentVelocityX) > 0 ? 1 : 0.12;
this.viewportX += deltaX * smoothingFactorX;
}
if (this.scrollY)
{
deltaY = (this.viewportTargetY - this.viewportY);
var smoothingFactorY = this.panningActive || Math.abs(this.currentVelocityY) > 0 ? 1 : 0.12;
this.viewportY += deltaY * smoothingFactorY;
}
var scrollXStop = !this.scrollX || (/*this.currentVelocityX < 0.01 &&*/ Math.abs(deltaX) < 0.5);
var scrollYStop = !this.scrollY || (/*this.currentVelocityY < 0.01 &&*/ Math.abs(deltaY) < 0.5);
if (!this.panningActive && scrollXStop && scrollYStop)
{
if (this.scrollX)
this.viewportX = this.viewportTargetX;
if (this.scrollY)
this.viewportY = this.viewportTargetY;
this.stopUpdate();
}
this.updateElementPosition();
this.event_updatePosition(time, this.viewportX, this.viewportY);
this.nextFrame();
},
nextFrame: function(){
if (this.isUpdating)
this.updateFrameHandle = anim.requestAnimationFrame(this.onUpdateHandler, this.targetElement);
},
setPosition: function(positionX, positionY, instantly){
this.setPositionX(positionX, !instantly);
this.setPositionY(positionY, !instantly);
},
setPositionX: function(positionX, smooth){
if (smooth)
{
this.viewportTargetX = positionX || 0;
this.currentVelocityX = 0;
this.startUpdate();
}
else
{
this.stopUpdate();
this.viewportX = positionX;
this.viewportTargetX = positionX;
this.updateElementPosition();
}
},
setPositionY: function(positionY, smooth){
if (smooth)
{
this.viewportTargetY = positionY || 0;
this.currentVelocityY = 0;
this.startUpdate();
}
else
{
this.stopUpdate();
this.viewportY = positionY;
this.viewportTargetY = positionY;
this.updateElementPosition();
}
},
getCurrentDirection: function(axis){
return axis == 'x' ? this.currentDirectionX : this.currentDirectionY;
},
calcExpectedPosition: function(axis){
var expectedInertiaDelta = 0;
var currentVelocity = axis == 'x' ? this.currentVelocityX : this.currentVelocityY;
var viewportTargetPosition = axis == 'x' ? this.viewportTargetX : this.viewportTargetY;
if (currentVelocity)
{
var expectedInertiaIterationCount = Math.log(0.001 / Math.abs(currentVelocity)) / Math.log(VELOCITY_DECREASE_FACTOR);
var velocity = currentVelocity;
for (var i = 0; i < expectedInertiaIterationCount; i++)
{
expectedInertiaDelta += velocity * AVERAGE_TICK_TIME_INTERVAL;
velocity *= VELOCITY_DECREASE_FACTOR;
}
}
// return expected position
return viewportTargetPosition + expectedInertiaDelta;
},/*,
calcExpectedPositionX: function(){
return this.calcExpectedPosition('x');
},
calcExpectedPositionY: function(){
return this.calcExpectedPosition('y');
}*/
destroy: function(){
this.setElement();
Emitter.prototype.destroy.call(this);
}
});
/**
* @class
*/
var Scrollbar = UINode.subclass({
className: namespace + '.Scrollbar',
orientation: '',
template: templates.Scrollbar,
binding: {
orientation: 'orientation'
},
listen: {
owner: {
realign: function(){
this.realign();
},
updatePosition: function(){
if (!this.trackSize)
this.realign();
var scrollPosition = this.getScrollbarPosition();
if (scrollPosition > 1)
scrollPosition = 1 + (scrollPosition - 1) * 3;
if (scrollPosition < 0)
scrollPosition *= 3;
var startPosition = Math.max(0, Math.min(this.trackSize * scrollPosition, this.scrollbarSize - 4));
var endPosition = Math.max(0, Math.min(this.trackSize - this.trackSize * scrollPosition, this.scrollbarSize - 4));
var style = {};
style[this.startProperty] = startPosition + 'px';
style[this.endProperty] = endPosition + 'px';
cssom.setStyle(this.tmpl.trackElement, style);
}
}
},
realign: function(){
this.scrollbarSize = this.getScrollbarSize();
this.trackSize = this.scrollbarSize - this.scrollbarSize * this.getScrollbarPart();
},
getScrollbarSize: basis.fn.$null,
getScrollbarPart: basis.fn.$null,
getScrollbarPosition: basis.fn.$null
});
/**
* @class
*/
var HorizontalScrollbar = Scrollbar.subclass({
className: namespace + '.HorizontalScrollbar',
orientation: 'horizontal',
startProperty: 'left',
endProperty: 'right',
getScrollbarSize: function(){
return this.element.offsetWidth;
},
getScrollbarPart: function(){
return this.owner.element.offsetWidth / (this.owner.maxPositionX - this.owner.minPositionX + this.owner.element.offsetWidth);
},
getScrollbarPosition: function(){
return (this.owner.scroller.viewportX - this.owner.minPositionX) / (this.owner.maxPositionX - this.owner.minPositionX);
}
});
/**
* @class
*/
var VerticalScrollbar = Scrollbar.subclass({
className: namespace + '.VerticalScrollbar',
orientation: 'vertical',
startProperty: 'top',
endProperty: 'bottom',
getScrollbarSize: function(){
return this.element.offsetHeight;
},
getScrollbarPart: function(){
return this.owner.element.offsetHeight / (this.owner.maxPositionY - this.owner.minPositionY + this.owner.element.offsetHeight);
},
getScrollbarPosition: function(){
return (this.owner.scroller.viewportY - this.owner.minPositionY) / (this.owner.maxPositionY - this.owner.minPositionY);
}
});
//
// Scroller
//
/**
* @class
*/
var ScrollPanel = UINode.subclass({
className: namespace + '.ScrollPanel',
useScrollbars: true,
scrollX: true,
scrollY: true,
wheelDelta: 40,
event_realign: createEvent('realign'),
event_updatePosition: createEvent('updatePosition'),
template: templates.ScrollPanel,
binding: {
horizontalScrollbar: 'satellite:',
verticalScrollbar: 'satellite:',
bothScrollbars: function(node){
return node.scrollX && node.scrollY ? 'bothScrollbars' : '';
},
scrollProcess: function(node){
return node.scroller && node.scroller.isUpdating ? 'scrollProcess' : '';
}
},
action: {
onwheel: function(event){
var delta = Event.wheelDelta(event);
if (this.scrollY)
this.scroller.setPositionY(this.scroller.viewportTargetY - this.wheelDelta * delta, true);
else if (this.scrollX)
this.scroller.setPositionX(this.scroller.viewportTargetX - this.wheelDelta * delta, true);
Event.kill(event);
}
},
satelliteConfig: {
horizontalScrollbar: {
instanceOf: HorizontalScrollbar,
existsIf: function(object){
return object.useScrollbars && object.scrollX;
}
},
verticalScrollbar: {
instanceOf: VerticalScrollbar,
existsIf: function(object){
return object.useScrollbars && object.scrollY;
}
}
},
init: function(){
UINode.prototype.init.call(this);
//init variables
this.minPositionX = 0;
this.minPositionY = 0;
this.maxPositionX = 0;
this.maxPositionY = 0;
// create scroller
var scrollerConfig = basis.object.extend(this.scroller || {}, {
targetElement: this.tmpl.scrollElement,
scrollX: this.scrollX,
scrollY: this.scrollY
});
this.scroller = new Scroller(scrollerConfig);
this.scroller.addHandler({
updatePosition: this.updatePosition,
start: function(){
if (!this.maxPositionX && !this.maxPositionY)
this.realign();
this.updateBind('scrollProcess');
},
finish: function(){
this.updateBind('scrollProcess');
}
}, this);
// add resize handler
basis.layout.addBlockResizeHandler(this.tmpl.scrollElement, this.realign.bind(this));
},
templateSync: function(noRecreate){
UINode.prototype.templateSync.call(this, noRecreate);
this.scroller.setElement(this.tmpl.scrollElement);
},
updatePosition: function(){
if (!this.scroller.panningActive)
this.fixPosition();
this.event_updatePosition();
},
fixPosition: function(){
var scroller = this.scroller;
if (this.scrollX && (scroller.viewportX < this.minPositionX || scroller.viewportX > this.maxPositionX))
{
var positionX = Math.min(this.maxPositionX, Math.max(this.minPositionX, scroller.viewportX));
scroller.setPositionX(positionX, true);
}
if (this.scrollY && (scroller.viewportY < this.minPositionY || scroller.viewportY > this.maxPositionY))
{
var positionY = Math.min(this.maxPositionY, Math.max(this.minPositionY, scroller.viewportY));
scroller.setPositionY(positionY, true);
}
},
realign: function(){
if (this.element.offsetWidth)
{
this.calcDimensions();
this.updatePosition();
this.event_realign();
}
},
calcDimensions: function(){
if (this.scrollX)
{
var containerWidth = this.element.offsetWidth;
var scrollWidth = this.tmpl.scrollElement.scrollWidth;
this.maxPositionX = Math.max(0, scrollWidth - containerWidth);
}
if (this.scrollY)
{
var containerHeight = this.element.offsetHeight;
var scrollHeight = this.tmpl.scrollElement.scrollHeight;
this.maxPositionY = Math.max(0, scrollHeight - containerHeight);
}
},
destroy: function(){
this.scroller.destroy();
this.scroller = null;
UINode.prototype.destroy.call(this);
}
});
/**
* @class
*/
var ScrollGallery = ScrollPanel.subclass({
className: namespace + '.ScrollGallery',
scrollX: false,
scrollY: false,
childTransform: basis.fn.$null,
selection: true,
action: {
onwheel: function(event){
var delta = Event.wheelDelta(event);
var selected = this.selection.pick();
var nextChild = delta == -1 ? selected.nextSibling : selected.previousSibling;
if (nextChild)
nextChild.select();
Event.kill(event);
}
},
event_childNodesModified: function(delta){
ScrollPanel.prototype.event_childNodesModified.call(this, delta);
if (this.scroller && this.childNodes.length == delta.inserted.length)
{
this.scrollToChild(this.firstChild, true);
this.firstChild.select();
}
},
childClass: UINode.subclass({
className: namespace + '.ScrollGalleryItem',
template: templates.ScrollGalleryItem,
action: {
select: function(){
if (!this.parentNode.scroller.isMoved)
this.select();
}
},
event_select: function(){
UINode.prototype.event_select.call(this);
this.parentNode.scrollToChild(this);
}
}),
init: function(){
ScrollPanel.prototype.init.call(this);
this.scroller.addHandler({
startInertia: this.adjustPosition
}, this);
if (this.childTransform != basis.fn.$null)
{
this.scroller.addHandler({
updatePosition: this.applyPosition
}, this);
}
if (!this.selection.itemCount && this.firstChild)
{
this.firstChild.select();
this.scrollToChild(this.firstChild, true);
}
},
setPosition: function(position, instantly){
if (this.scrollX)
this.scroller.setPositionX(position, !instantly);
else
this.scroller.setPositionY(position, !instantly);
},
adjustPosition: function(){
var childSize = this.scrollX ? this.firstChild.element.offsetWidth : this.firstChild.element.offsetHeight;
var startPosition = (this.scrollX ? this.element.offsetWidth : this.element.offsetHeight) / 2;
var newPosition = startPosition - childSize / 2 + this.calcExpectedPosition();
var childScrollTo = Math.max(0, Math.min(this.childNodes.length - 1, Math.round(newPosition / childSize)));
this.scrollToChild(this.childNodes[childScrollTo]);
},
applyPosition: function(){
var childSize = this.scrollX ? this.firstChild.element.offsetWidth : this.firstChild.element.offsetHeight;
var startPosition = this.scrollX ? this.element.offsetWidth / 2 : this.element.offsetHeight / 2;
var newPosition = startPosition - childSize / 2 + (this.scroller.viewportX || this.scroller.viewportY);
var closestChildPos = Math.floor(newPosition / childSize);
var offset = newPosition / childSize - closestChildPos;
var closeness;
for (var i = 0, child; child = this.childNodes[i]; i++)
{
closeness = i == closestChildPos ? 1 - offset : (i == closestChildPos + 1 ? offset : 0);
this.childTransform(child, closeness);
}
},
scrollToChild: function(child, instantly){
var startPosition = this.scrollX ? this.element.offsetWidth / 2 : this.element.offsetHeight / 2;
var childPosition = this.scrollX ? child.element.offsetLeft : child.element.offsetTop;
var childSize = this.scrollX ? child.element.offsetWidth : child.element.offsetHeight;
this.setPosition(childPosition + childSize / 2 - startPosition, instantly);
},
calcDimensions: function(){
ScrollPanel.prototype.calcDimensions.call(this);
if (this.scrollX)
{
this.minPositionX = (this.firstChild ? this.firstChild.element.offsetWidth / 2 : 0) - this.element.offsetWidth / 2;
this.maxPositionX = this.maxPositionX + this.element.offsetWidth / 2 - (this.lastChild ? this.lastChild.element.offsetWidth / 2 : 0);
}
if (this.scrollY)
{
this.minPositionY = (this.firstChild ? this.firstChild.element.offsetHeight / 2 : 0) - this.element.offsetHeight / 2;
this.maxPositionY = this.maxPositionY + this.element.offsetHeight / 2 - (this.lastChild ? this.lastChild.element.offsetHeight / 2 : 0);
}
},
calcExpectedPosition: function(){
return this.scroller.calcExpectedPosition(this.scrollX ? 'x' : 'y');
}
});
//
// export names
//
module.exports = {
Scroller: Scroller,
Scrollbar: Scrollbar,
ScrollPanel: ScrollPanel,
ScrollGallery: ScrollGallery
};
| remove basis.dom dependancy
| src/basis/ui/scroller.js | remove basis.dom dependancy | <ide><path>rc/basis/ui/scroller.js
<ide>
<ide> basis.require('basis.event');
<del> basis.require('basis.dom');
<ide> basis.require('basis.dom.event');
<ide> basis.require('basis.cssom');
<ide> basis.require('basis.ui');
<ide> // import names
<ide> //
<ide>
<del> var DOM = basis.dom;
<ide> var Event = basis.dom.event;
<ide> var cssom = basis.cssom;
<ide> var anim = basis.animation;
<ide> var TRANSFORM_PROPERTY_NAME;
<ide>
<ide> (function (){
<del> var testElement = DOM.createElement('');
<add> var style = document.body.style;
<ide>
<ide> function testProps(properties){
<ide> for (var i = 0, propertyName; propertyName = properties[i]; i++)
<del> if (typeof testElement.style[propertyName] != 'undefined')
<add> if (typeof style[propertyName] != 'undefined')
<ide> return propertyName;
<ide>
<ide> return false;
<ide> 'msPerspective'
<ide> ]);
<ide>
<del> if (prop || 'webkitPerspective' in testElement.style)
<add> if (prop || 'webkitPerspective' in style)
<ide> TRANSFORM_3D_SUPPORT = true;
<ide> }
<ide> })(); |
|
JavaScript | apache-2.0 | 4e74f4986abbb076a9007579db3ae821690f5ea2 | 0 | eregn001/TPSEricTest1,eregn001/TPSEricTest1,Esri/offline-editor-js,Esri/offline-editor-js,guo7711/offline-editor-js,EnzeZY/offline-editor-js,EnzeZY/offline-editor-js,plaxdan/offline-editor-js,EnzeZY/offline-editor-js,andygup/offline-editor-js,Ricardh522/offline-editor-js,eregn001/test5,guo7711/offline-editor-js,degathem/offline-editor-js,Ricardh522/offline-editor-js,Esri/offline-editor-js,eregn001/test5,degathem/offline-editor-js,eregn001/test5,plaxdan/offline-editor-js,guo7711/offline-editor-js,eregn001/TPSEricTest1,andygup/offline-editor-js,andygup/offline-editor-js,plaxdan/offline-editor-js,Ricardh522/offline-editor-js,degathem/offline-editor-js | "use strict"
var map;
var basemapLayer;
var graphics;
var cancelRequested, startTime;
var showTiles = false;
require(["esri/map",
"esri/layers/GraphicsLayer", "esri/graphic", "esri/symbols/SimpleFillSymbol",
"esri/dijit/Scalebar",
"esri/arcgis/utils",
"esri/geometry",
"dojo/dom",
"dojo/on",
"dojo/query",
"../vendor/bootstrap-map-js/src/js/bootstrapmap.js",
"esri/urlUtils",
"esri/geometry/webMercatorUtils",
"src/offlineEnabler.js",
"dojo/dom-construct",
"dojo/domReady!"],
function(Map, GraphicsLayer, Graphic, SimpleFillSymbol, Scalebar, esriUtils, geometry, dom, on, query, BootstrapMap, urlUtils, webMercatorUtils,
offlineEnabler,
domConstruct)
{
var scalebar;
var symbol;
// Load web map when page loads
var urlObject = urlUtils.urlToObject(window.location.href);
var webmapid;
if( urlObject.query && urlObject.query.webmap)
webmapid = urlObject.query.webmap;
loadWebmap(webmapid);
function loadWebmap(webmapid)
{
webmapid = webmapid || "f58996878ac24702afef792e52a07e55";
// Get new webmap and extract map and map parts
var mapDeferred = esriUtils.createMap(webmapid, "mapDiv", {
mapOptions: {
slider: true,
nav:false,
smartNavigation:false
}
});
mapDeferred.then(function(response)
{
map = response.map;
// Bind to map
BootstrapMap.bindTo(map);
// Add title
dom.byId("mapTitle").innerHTML = response.itemInfo.item.title;
if(map.loaded)
{
basemapLayer = map.getLayer( map.layerIds[0] );
initMapParts();
initEvents();
initOffline();
}
else
{
on(map,"load",function()
{
basemapLayer = map.getLayer( map.layerIds[0] );
initMapParts();
initEvents();
initOffline();
});
}
},function(error){
alert("Sorry, couldn't load webmap! " + dojo.toJson(error));
console.log("Error loading webmap: ", dojo.toJson(error));
});
}
function initMapParts()
{
scalebar = new Scalebar({
map:map,
scalebarUnit: 'metric'
});
graphics = new GraphicsLayer();
map.addLayer( graphics );
symbol = new SimpleFillSymbol({
"type": "esriSFS",
"style": "esriSFSSolid",
"color": [255,0,0,5],
"outline": {
"type": "esriSLS",
"style": "esriSLSSolid",
"color": [0,0,0,100],
"width": 0.5
}
});
}
function initEvents()
{
map.on('extent-change', updateTileCountEstimation );
on(dojo.byId('minLevel'),'change', updateTileCountEstimation);
on(dojo.byId('maxLevel'),'change', updateTileCountEstimation);
dojo.byId('minLevel').value = basemapLayer.minLevel = basemapLayer.tileInfo.lods[0].level;
dojo.byId('maxLevel').value = basemapLayer.maxLevel = basemapLayer.tileInfo.lods[basemapLayer.tileInfo.lods.length-1].level;
}
function initOffline()
{
console.log("extending");
offlineEnabler.extend(basemapLayer,function(success)
{
if(success)
{
on(dojo.byId('prepare-for-offline-btn'),'click', prepareForOffline);
on(dojo.byId('cancel-btn'),'click', cancel);
on(dojo.byId('delete-all-tiles-btn'),'click', deleteAllTiles);
on(dojo.byId('go-offline-btn'),'click', goOffline);
on(dojo.byId('go-online-btn'),'click', goOnline);
on(dojo.byId('update-offline-usage'),'click', updateOfflineUsage);
on(dojo.byId('show-stored-tiles'),'click', toggleShowStoredTiles);
esri.show(dojo.byId('ready-to-download-ui'));
esri.hide(dojo.byId('downloading-ui'));
updateOfflineUsage();
updateTileCountEstimation();
}
else
{
dojo.byId('prepare-for-offline-btn').disabled = true;
esri.hide(dojo.byId('downloading-ui'));
/* JAMI: TODO add message telling that something failed while initing the indexedDB */
}
});
Offline.on('up', goOnline );
Offline.on('down', goOffline );
}
function updateOfflineUsage()
{
dojo.byId('offline-usage').innerHTML = "updating...";
basemapLayer.getOfflineUsage(function(usage)
{
console.log(usage);
console.log("Avg tile size:", Math.round(usage.size * 1024 / usage.tileCount * 100) / 100, "Kb");
var usageStr = usage.size + " Mb (" + usage.tileCount + " tiles)";
dojo.byId('offline-usage').innerHTML = usageStr;
});
}
function updateTileCountEstimation()
{
console.log('updating');
var zoomLevel = map.getLevel();
dojo.byId('currentLevel').value = zoomLevel;
var minLevel = parseInt(dojo.byId('minLevel').value);
var maxLevel = parseInt(dojo.byId('maxLevel').value);
if( maxLevel > zoomLevel + 3 || maxLevel > basemapLayer.maxLevel)
{
maxLevel = Math.min(basemapLayer.maxLevel, zoomLevel + 3);
dojo.byId('maxLevel').value = maxLevel;
}
var totalEstimation = { tileCount:0, sizeBytes:0 }
domConstruct.empty('tile-count-table-body');
for(var level=minLevel; level<=maxLevel; level++)
{
var levelEstimation = basemapLayer.getLevelEstimation(map.extent,level);
totalEstimation.tileCount += levelEstimation.tileCount;
totalEstimation.sizeBytes += levelEstimation.sizeBytes;
if( levelEstimation.tileCount > 1)
{
var rowContent = [levelEstimation.level, levelEstimation.tileCount, Math.round(levelEstimation.sizeBytes / 1024 / 1024 * 100) / 100 + " Mb"]
rowContent = "<td>" + rowContent.join("</td><td>") + "</td>";
var tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place(rowContent, tr,'last');
}
if( totalEstimation.tileCount > 5000 )
{
var tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place("<td colspan=4>...</td>", tr,'last');
break;
}
}
rowContent = ["Total", totalEstimation.tileCount, Math.floor(totalEstimation.sizeBytes / 1024 / 1024 * 100)/100 + " Mb"];
rowContent = "<td><b>" + rowContent.join("</b></td><td><b>") + "</b></td>";
tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place(rowContent, tr,'last');
}
function goOffline()
{
dojo.byId('go-offline-btn').disabled = true;
dojo.byId('go-online-btn').disabled = undefined;
basemapLayer.goOffline();
}
function goOnline()
{
dojo.byId('go-offline-btn').disabled = undefined;
dojo.byId('go-online-btn').disabled = true;
basemapLayer.goOnline();
}
function deleteAllTiles()
{
basemapLayer.deleteAllTiles(function(success, err)
{
console.log("deleteAllTiles():", success,err);
if( success )
alert("All tiles deleted");
else
alert("Can't delete tiles");
setTimeout(updateOfflineUsage,0); // request execution in the next turn of the event loop
});
}
function prepareForOffline()
{
/* put UI in downloading mode */
cancelRequested = false;
reportProgress(0,1);
esri.hide(dojo.byId('ready-to-download-ui'));
esri.show(dojo.byId('downloading-ui'));
startTime = new Date();
/* launch offline preparation process */
var minLevel = parseInt(dojo.byId('minLevel').value);
var maxLevel = parseInt(dojo.byId('maxLevel').value);
basemapLayer.prepareForOffline(minLevel, maxLevel, map.extent, reportProgress, finishedDownloading);
}
function cancel()
{
cancelRequested = true;
}
function reportProgress(countNow,countMax)
{
var pbar = query('#download-progress [role=progressbar]')[0];
var percent = countMax? (countNow / countMax * 100) : 0;
pbar.style.width = percent+"%";
if( countNow > 5 )
{
var currentTime = new Date();
var elapsedTime = currentTime - startTime;
var remainingTime = (elapsedTime / countNow) * (countMax - countNow);
var sec = 1 + Math.floor(remainingTime / 1000);
var min = Math.floor(sec / 60);
sec -= (min * 60);
dojo.byId('remaining-time').innerHTML = ((min<10)? "0" + min : min) + ":" + ((sec<10)? "0" + sec : sec);
}
return cancelRequested;
}
function finishedDownloading(cancelled)
{
setTimeout(function()
{
esri.show(dojo.byId('ready-to-download-ui'));
esri.hide(dojo.byId('downloading-ui'));
updateOfflineUsage();
showStoredTiles(showTiles);
}, 1000);
}
function toggleShowStoredTiles()
{
showTiles = !showTiles;
dojo.byId('show-stored-tiles-caption').innerHTML = showTiles? "Hide Stored Tiles" : "Show Stored Tiles";
showStoredTiles(showTiles);
}
function showStoredTiles(showTiles)
{
graphics.clear();
if( showTiles )
{
basemapLayer.getTilePolygons(function(polygon,err)
{
if(polygon)
{
var graphic = new Graphic(polygon, symbol);
graphics.add(graphic);
}
else
{
console.log("showStoredTiles: ", err);
}
}.bind(this));
}
}
});
| tiles/main.js | "use strict"
var map;
var basemapLayer;
var graphics;
var cancelRequested, startTime;
var showTiles = false;
require(["esri/map",
"esri/layers/GraphicsLayer", "esri/graphic", "esri/symbols/SimpleFillSymbol",
"esri/dijit/Scalebar",
"esri/arcgis/utils",
"esri/geometry",
"dojo/dom",
"dojo/on",
"dojo/query",
"../vendor/bootstrap-map-js/src/js/bootstrapmap.js",
"esri/urlUtils",
"esri/geometry/webMercatorUtils",
"src/offlineEnabler.js",
"dojo/dom-construct",
"dojo/domReady!"],
function(Map, GraphicsLayer, Graphic, SimpleFillSymbol, Scalebar, esriUtils, geometry, dom, on, query, BootstrapMap, urlUtils, webMercatorUtils,
offlineEnabler,
domConstruct)
{
var scalebar;
var symbol;
// Load web map when page loads
var urlObject = urlUtils.urlToObject(window.location.href);
var webmapid;
if( urlObject.query && urlObject.query.webmap)
webmapid = urlObject.query.webmap;
loadWebmap(webmapid);
function loadWebmap(webmapid)
{
webmapid = webmapid || "f58996878ac24702afef792e52a07e55";
// Get new webmap and extract map and map parts
var mapDeferred = esriUtils.createMap(webmapid, "mapDiv", {
mapOptions: {
slider: true,
nav:false,
smartNavigation:false
}
});
mapDeferred.then(function(response)
{
map = response.map;
// Bind to map
BootstrapMap.bindTo(map);
// Add title
dom.byId("mapTitle").innerHTML = response.itemInfo.item.title;
if(map.loaded)
{
basemapLayer = map.getLayer( map.layerIds[0] );
initMapParts();
initEvents();
initOffline();
}
else
{
on(map,"load",function()
{
basemapLayer = map.getLayer( map.layerIds[0] );
initMapParts();
initEvents();
initOffline();
});
}
},function(error){
alert("Sorry, couldn't load webmap! " + dojo.toJson(error));
console.log("Error loading webmap: ", dojo.toJson(error));
});
}
function initMapParts()
{
scalebar = new Scalebar({
map:map,
scalebarUnit: 'metric'
});
graphics = new GraphicsLayer();
map.addLayer( graphics );
symbol = new SimpleFillSymbol({
"type": "esriSFS",
"style": "esriSFSSolid",
"color": [255,0,0,5],
"outline": {
"type": "esriSLS",
"style": "esriSLSSolid",
"color": [0,0,0,100],
"width": 0.5
}
});
}
function initEvents()
{
map.on('extent-change', updateTileCountEstimation );
on(dojo.byId('minLevel'),'change', updateTileCountEstimation);
on(dojo.byId('maxLevel'),'change', updateTileCountEstimation);
dojo.byId('minLevel').value = basemapLayer.minLevel = basemapLayer.tileInfo.lods[0].level;
dojo.byId('maxLevel').value = basemapLayer.maxLevel = basemapLayer.tileInfo.lods[basemapLayer.tileInfo.lods.length-1].level;
}
function initOffline()
{
console.log("extending");
offlineEnabler.extend(basemapLayer,function(success)
{
if(success)
{
on(dojo.byId('prepare-for-offline-btn'),'click', prepareForOffline);
on(dojo.byId('cancel-btn'),'click', cancel);
on(dojo.byId('delete-all-tiles-btn'),'click', deleteAllTiles);
on(dojo.byId('go-offline-btn'),'click', goOffline);
on(dojo.byId('go-online-btn'),'click', goOnline);
on(dojo.byId('update-offline-usage'),'click', updateOfflineUsage);
on(dojo.byId('show-stored-tiles'),'click', toggleShowStoredTiles);
esri.show(dojo.byId('ready-to-download-ui'));
esri.hide(dojo.byId('downloading-ui'));
updateOfflineUsage();
updateTileCountEstimation();
}
else
{
dojo.byId('prepare-for-offline-btn').disabled = true;
esri.hide(dojo.byId('downloading-ui'));
/* JAMI: TODO add message telling that something failed while initing the indexedDB */
}
});
Offline.on('up', goOnline );
Offline.on('down', goOffline );
}
function updateOfflineUsage()
{
dojo.byId('offline-usage').innerHTML = "updating...";
basemapLayer.getOfflineUsage(function(usage)
{
console.log(usage);
console.log("Avg tile size:", Math.round(usage.size * 1024 / usage.tileCount * 100) / 100, "Kb");
var usageStr = usage.size + " Mb (" + usage.tileCount + " tiles)";
dojo.byId('offline-usage').innerHTML = usageStr;
});
}
function updateTileCountEstimation()
{
console.log('updating');
var zoomLevel = map.getLevel();
dojo.byId('currentLevel').value = zoomLevel;
var minLevel = parseInt(dojo.byId('minLevel').value);
var maxLevel = parseInt(dojo.byId('maxLevel').value);
if( maxLevel > zoomLevel + 3)
{
maxLevel = zoomLevel + 3;
dojo.byId('maxLevel').value = maxLevel;
}
var totalEstimation = { tileCount:0, sizeBytes:0 }
domConstruct.empty('tile-count-table-body');
for(var level=minLevel; level<=maxLevel; level++)
{
var levelEstimation = basemapLayer.getLevelEstimation(map.extent,level);
totalEstimation.tileCount += levelEstimation.tileCount;
totalEstimation.sizeBytes += levelEstimation.sizeBytes;
if( levelEstimation.tileCount > 1)
{
var rowContent = [levelEstimation.level, levelEstimation.tileCount, Math.round(levelEstimation.sizeBytes / 1024 / 1024 * 100) / 100 + " Mb"]
rowContent = "<td>" + rowContent.join("</td><td>") + "</td>";
var tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place(rowContent, tr,'last');
}
if( totalEstimation.tileCount > 5000 )
{
var tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place("<td colspan=4>...</td>", tr,'last');
break;
}
}
rowContent = ["Total", totalEstimation.tileCount, Math.floor(totalEstimation.sizeBytes / 1024 / 1024 * 100)/100 + " Mb"];
rowContent = "<td><b>" + rowContent.join("</b></td><td><b>") + "</b></td>";
tr = domConstruct.place("<tr>", dojo.byId('tile-count-table-body'),'last')
domConstruct.place(rowContent, tr,'last');
}
function goOffline()
{
dojo.byId('go-offline-btn').disabled = true;
dojo.byId('go-online-btn').disabled = undefined;
basemapLayer.goOffline();
}
function goOnline()
{
dojo.byId('go-offline-btn').disabled = undefined;
dojo.byId('go-online-btn').disabled = true;
basemapLayer.goOnline();
}
function deleteAllTiles()
{
basemapLayer.deleteAllTiles(function(success, err)
{
console.log("deleteAllTiles():", success,err);
if( success )
alert("All tiles deleted");
else
alert("Can't delete tiles");
setTimeout(updateOfflineUsage,0); // request execution in the next turn of the event loop
});
}
function prepareForOffline()
{
/* put UI in downloading mode */
cancelRequested = false;
reportProgress(0,1);
esri.hide(dojo.byId('ready-to-download-ui'));
esri.show(dojo.byId('downloading-ui'));
startTime = new Date();
/* launch offline preparation process */
var minLevel = parseInt(dojo.byId('minLevel').value);
var maxLevel = parseInt(dojo.byId('maxLevel').value);
basemapLayer.prepareForOffline(minLevel, maxLevel, map.extent, reportProgress, finishedDownloading);
}
function cancel()
{
cancelRequested = true;
}
function reportProgress(countNow,countMax)
{
var pbar = query('#download-progress [role=progressbar]')[0];
var percent = countMax? (countNow / countMax * 100) : 0;
pbar.style.width = percent+"%";
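			// after the first few tiles, estimate the time remaining from the average time per tile so far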
if( countNow > 5 )
{
var currentTime = new Date();
var elapsedTime = currentTime - startTime;
var remainingTime = (elapsedTime / countNow) * (countMax - countNow);
var sec = 1 + Math.floor(remainingTime / 1000);
var min = Math.floor(sec / 60);
sec -= (min * 60);
dojo.byId('remaining-time').innerHTML = ((min<10)? "0" + min : min) + ":" + ((sec<10)? "0" + sec : sec);
}
return cancelRequested;
}
function finishedDownloading(cancelled)
{
setTimeout(function()
{
esri.show(dojo.byId('ready-to-download-ui'));
esri.hide(dojo.byId('downloading-ui'));
updateOfflineUsage();
showStoredTiles(showTiles);
}, 1000);
}
function toggleShowStoredTiles()
{
showTiles = !showTiles;
dojo.byId('show-stored-tiles-caption').innerHTML = showTiles? "Hide Stored Tiles" : "Show Stored Tiles";
showStoredTiles(showTiles);
}
function showStoredTiles(showTiles)
{
graphics.clear();
if( showTiles )
{
basemapLayer.getTilePolygons(function(polygon,err)
{
if(polygon)
{
var graphic = new Graphic(polygon, symbol);
graphics.add(graphic);
}
else
{
console.log("showStoredTiles: ", err);
}
}.bind(this));
}
}
});
| limit levels | tiles/main.js | limit levels | <ide><path>iles/main.js
<ide> var minLevel = parseInt(dojo.byId('minLevel').value);
<ide> var maxLevel = parseInt(dojo.byId('maxLevel').value);
<ide>
<del> if( maxLevel > zoomLevel + 3)
<del> {
<del> maxLevel = zoomLevel + 3;
<add> if( maxLevel > zoomLevel + 3 || maxLevel > basemapLayer.maxLevel)
<add> {
<add> maxLevel = Math.min(basemapLayer.maxLevel, zoomLevel + 3);
<ide> dojo.byId('maxLevel').value = maxLevel;
<ide> }
<ide> |
|
Java | mit | f8f25c31be7205906c92ae5b8ed5ada7e56efe4f | 0 | my7seven/jenkins,shahharsh/jenkins,csimons/jenkins,Ykus/jenkins,chbiel/jenkins,duzifang/my-jenkins,hemantojhaa/jenkins,MichaelPranovich/jenkins_sc,godfath3r/jenkins,noikiy/jenkins,6WIND/jenkins,yonglehou/jenkins,mrooney/jenkins,protazy/jenkins,jcarrothers-sap/jenkins,MadsNielsen/jtemp,oleg-nenashev/jenkins,luoqii/jenkins,brunocvcunha/jenkins,jglick/jenkins,Ykus/jenkins,damianszczepanik/jenkins,svanoort/jenkins,tangkun75/jenkins,ChrisA89/jenkins,mattclark/jenkins,lilyJi/jenkins,CodeShane/jenkins,jtnord/jenkins,maikeffi/hudson,evernat/jenkins,intelchen/jenkins,dariver/jenkins,elkingtonmcb/jenkins,soenter/jenkins,mrobinet/jenkins,iqstack/jenkins,albers/jenkins,jhoblitt/jenkins,synopsys-arc-oss/jenkins,duzifang/my-jenkins,protazy/jenkins,aheritier/jenkins,khmarbaise/jenkins,jcsirot/jenkins,pantheon-systems/jenkins,lvotypko/jenkins3,jpederzolli/jenkins-1,gorcz/jenkins,lvotypko/jenkins2,svanoort/jenkins,maikeffi/hudson,thomassuckow/jenkins,ns163/jenkins,morficus/jenkins,mdonohue/jenkins,292388900/jenkins,vijayto/jenkins,rashmikanta-1984/jenkins,bkmeneguello/jenkins,morficus/jenkins,pantheon-systems/jenkins,elkingtonmcb/jenkins,jcsirot/jenkins,damianszczepanik/jenkins,tfennelly/jenkins,jhoblitt/jenkins,khmarbaise/jenkins,ikedam/jenkins,stefanbrausch/hudson-main,vlajos/jenkins,AustinKwang/jenkins,mrooney/jenkins,keyurpatankar/hudson,akshayabd/jenkins,paulmillar/jenkins,stefanbrausch/hudson-main,FarmGeek4Life/jenkins,MarkEWaite/jenkins,FarmGeek4Life/jenkins,wangyikai/jenkins,amuniz/jenkins,sathiya-mit/jenkins,patbos/jenkins,liorhson/jenkins,Krasnyanskiy/jenkins,MarkEWaite/jenkins,wuwen5/jenkins,samatdav/jenkins,MarkEWaite/jenkins,lordofthejars/jenkins,6WIND/jenkins,dbroady1/jenkins,hashar/jenkins,sathiya-mit/jenkins,christ66/jenkins,chbiel/jenkins,keyurpatankar/hudson,goldchang/jenkins,akshayabd/jenkins,daniel-beck/jenkins,pselle/jenkins,dariver/jenkins,dariver/jenkins,jglick/jenkins,yonglehou/jenkins,MichaelPranovich/jenkins_sc,daniel-beck/jenkins,deadmoose/jenkins,godfath3r/jenkins,vivek/hudson,rlugojr/jenkins,yonglehou/jenkins,lvotypko/jenkins,lvotypko/jenkins2,synopsys-arc-oss/jenkins,mrobinet/jenkins,damianszczepanik/jenkins,lordofthejars/jenkins,aduprat/jenkins,6WIND/jenkins,Vlatombe/jenkins,gusreiber/jenkins,AustinKwang/jenkins,khmarbaise/jenkins,dbroady1/jenkins,vjuranek/jenkins,mrobinet/jenkins,lvotypko/jenkins3,gitaccountforprashant/gittest,MarkEWaite/jenkins,huybrechts/hudson,mpeltonen/jenkins,DanielWeber/jenkins,jk47/jenkins,iterate/coding-dojo,MarkEWaite/jenkins,1and1/jenkins,msrb/jenkins,KostyaSha/jenkins,viqueen/jenkins,jtnord/jenkins,seanlin816/jenkins,pselle/jenkins,MichaelPranovich/jenkins_sc,lindzh/jenkins,FTG-003/jenkins,jenkinsci/jenkins,nandan4/Jenkins,lvotypko/jenkins3,christ66/jenkins,MichaelPranovich/jenkins_sc,bkmeneguello/jenkins,amuniz/jenkins,luoqii/jenkins,mrooney/jenkins,seanlin816/jenkins,arcivanov/jenkins,verbitan/jenkins,goldchang/jenkins,aheritier/jenkins,verbitan/jenkins,gorcz/jenkins,godfath3r/jenkins,huybrechts/hudson,mcanthony/jenkins,stephenc/jenkins,lvotypko/jenkins3,6WIND/jenkins,albers/jenkins,MarkEWaite/jenkins,pjanouse/jenkins,intelchen/jenkins,rashmikanta-1984/jenkins,jtnord/jenkins,amruthsoft9/Jenkis,csimons/jenkins,dennisjlee/jenkins,stefanbrausch/hudson-main,varmenise/jenkins,batmat/jenkins,batmat/jenkins,olivergondza/jenkins,singh88/jenkins,samatdav/jenkins,Vlatombe/jenkins,vvv444/jenkins,luoqii/jenkins,alvarolobato/jenkins,bkmeneguello/jenkins,brunocvcunha/jenkins,Ykus/jenkins,ErikVerheul/j
enkins,SenolOzer/jenkins,maikeffi/hudson,jpbriend/jenkins,singh88/jenkins,daspilker/jenkins,mcanthony/jenkins,github-api-test-org/jenkins,intelchen/jenkins,v1v/jenkins,fbelzunc/jenkins,shahharsh/jenkins,jpbriend/jenkins,pantheon-systems/jenkins,arcivanov/jenkins,NehemiahMi/jenkins,luoqii/jenkins,abayer/jenkins,vivek/hudson,Jimilian/jenkins,gitaccountforprashant/gittest,paulmillar/jenkins,amruthsoft9/Jenkis,christ66/jenkins,tastatur/jenkins,wangyikai/jenkins,DoctorQ/jenkins,KostyaSha/jenkins,hplatou/jenkins,my7seven/jenkins,tfennelly/jenkins,DanielWeber/jenkins,vijayto/jenkins,paulmillar/jenkins,vijayto/jenkins,liupugong/jenkins,oleg-nenashev/jenkins,guoxu0514/jenkins,iqstack/jenkins,dbroady1/jenkins,kzantow/jenkins,recena/jenkins,synopsys-arc-oss/jenkins,duzifang/my-jenkins,rlugojr/jenkins,keyurpatankar/hudson,lindzh/jenkins,ndeloof/jenkins,samatdav/jenkins,nandan4/Jenkins,NehemiahMi/jenkins,patbos/jenkins,hemantojhaa/jenkins,mcanthony/jenkins,kohsuke/hudson,yonglehou/jenkins,oleg-nenashev/jenkins,mpeltonen/jenkins,arunsingh/jenkins,292388900/jenkins,daspilker/jenkins,my7seven/jenkins,kohsuke/hudson,github-api-test-org/jenkins,escoem/jenkins,khmarbaise/jenkins,ChrisA89/jenkins,hplatou/jenkins,godfath3r/jenkins,tfennelly/jenkins,paulmillar/jenkins,viqueen/jenkins,damianszczepanik/jenkins,rsandell/jenkins,wangyikai/jenkins,luoqii/jenkins,guoxu0514/jenkins,github-api-test-org/jenkins,lvotypko/jenkins3,escoem/jenkins,batmat/jenkins,jhoblitt/jenkins,duzifang/my-jenkins,elkingtonmcb/jenkins,yonglehou/jenkins,SebastienGllmt/jenkins,aldaris/jenkins,Vlatombe/jenkins,azweb76/jenkins,viqueen/jenkins,6WIND/jenkins,mrobinet/jenkins,christ66/jenkins,seanlin816/jenkins,ns163/jenkins,jcarrothers-sap/jenkins,amruthsoft9/Jenkis,ChrisA89/jenkins,shahharsh/jenkins,mcanthony/jenkins,v1v/jenkins,rashmikanta-1984/jenkins,mrobinet/jenkins,hashar/jenkins,iqstack/jenkins,verbitan/jenkins,lvotypko/jenkins,thomassuckow/jenkins,stephenc/jenkins,bpzhang/jenkins,CodeShane/jenkins,keyurpatankar/hudson,godfath3r/jenkins,everyonce/jenkins,thomassuckow/jenkins,aduprat/jenkins,daniel-beck/jenkins,gusreiber/jenkins,bkmeneguello/jenkins,viqueen/jenkins,soenter/jenkins,jhoblitt/jenkins,amuniz/jenkins,batmat/jenkins,ydubreuil/jenkins,albers/jenkins,my7seven/jenkins,scoheb/jenkins,arunsingh/jenkins,pjanouse/jenkins,arunsingh/jenkins,jcarrothers-sap/jenkins,ydubreuil/jenkins,singh88/jenkins,MadsNielsen/jtemp,noikiy/jenkins,huybrechts/hudson,csimons/jenkins,thomassuckow/jenkins,keyurpatankar/hudson,Jimilian/jenkins,goldchang/jenkins,csimons/jenkins,thomassuckow/jenkins,shahharsh/jenkins,MadsNielsen/jtemp,mpeltonen/jenkins,jpederzolli/jenkins-1,vivek/hudson,SenolOzer/jenkins,wuwen5/jenkins,liorhson/jenkins,liorhson/jenkins,Vlatombe/jenkins,chbiel/jenkins,godfath3r/jenkins,samatdav/jenkins,dennisjlee/jenkins,aquarellian/jenkins,gitaccountforprashant/gittest,everyonce/jenkins,wuwen5/jenkins,Jochen-A-Fuerbacher/jenkins,stephenc/jenkins,ikedam/jenkins,Jochen-A-Fuerbacher/jenkins,deadmoose/jenkins,keyurpatankar/hudson,kohsuke/hudson,AustinKwang/jenkins,chbiel/jenkins,huybrechts/hudson,Ykus/jenkins,albers/jenkins,ajshastri/jenkins,AustinKwang/jenkins,pjanouse/jenkins,rlugojr/jenkins,jglick/jenkins,msrb/jenkins,vjuranek/jenkins,patbos/jenkins,vlajos/jenkins,1and1/jenkins,msrb/jenkins,h4ck3rm1k3/jenkins,hashar/jenkins,guoxu0514/jenkins,vvv444/jenkins,paulwellnerbou/jenkins,lvotypko/jenkins2,patbos/jenkins,paulmillar/jenkins,github-api-test-org/jenkins,Jochen-A-Fuerbacher/jenkins,292388900/jenkins,patbos/jenkins,recena/jenkins,AustinKwang/jen
kins,aldaris/jenkins,Jimilian/jenkins,jk47/jenkins,iterate/coding-dojo,andresrc/jenkins,lilyJi/jenkins,morficus/jenkins,aduprat/jenkins,pjanouse/jenkins,aduprat/jenkins,deadmoose/jenkins,abayer/jenkins,fbelzunc/jenkins,arcivanov/jenkins,scoheb/jenkins,tfennelly/jenkins,keyurpatankar/hudson,1and1/jenkins,fbelzunc/jenkins,ikedam/jenkins,petermarcoen/jenkins,petermarcoen/jenkins,v1v/jenkins,shahharsh/jenkins,gusreiber/jenkins,synopsys-arc-oss/jenkins,dbroady1/jenkins,dariver/jenkins,jenkinsci/jenkins,dbroady1/jenkins,lvotypko/jenkins3,andresrc/jenkins,viqueen/jenkins,rlugojr/jenkins,lvotypko/jenkins,duzifang/my-jenkins,lindzh/jenkins,brunocvcunha/jenkins,maikeffi/hudson,brunocvcunha/jenkins,wangyikai/jenkins,jenkinsci/jenkins,lilyJi/jenkins,viqueen/jenkins,sathiya-mit/jenkins,andresrc/jenkins,KostyaSha/jenkins,jpederzolli/jenkins-1,paulmillar/jenkins,goldchang/jenkins,amuniz/jenkins,rsandell/jenkins,lordofthejars/jenkins,FarmGeek4Life/jenkins,vlajos/jenkins,mcanthony/jenkins,everyonce/jenkins,escoem/jenkins,escoem/jenkins,CodeShane/jenkins,my7seven/jenkins,bpzhang/jenkins,iterate/coding-dojo,h4ck3rm1k3/jenkins,singh88/jenkins,Krasnyanskiy/jenkins,MichaelPranovich/jenkins_sc,wuwen5/jenkins,ndeloof/jenkins,jzjzjzj/jenkins,synopsys-arc-oss/jenkins,tastatur/jenkins,lindzh/jenkins,goldchang/jenkins,hashar/jenkins,mrobinet/jenkins,ChrisA89/jenkins,aheritier/jenkins,mdonohue/jenkins,godfath3r/jenkins,azweb76/jenkins,liupugong/jenkins,ns163/jenkins,morficus/jenkins,gorcz/jenkins,christ66/jenkins,aquarellian/jenkins,vjuranek/jenkins,vivek/hudson,soenter/jenkins,jk47/jenkins,dbroady1/jenkins,petermarcoen/jenkins,SenolOzer/jenkins,soenter/jenkins,jenkinsci/jenkins,292388900/jenkins,daniel-beck/jenkins,KostyaSha/jenkins,Wilfred/jenkins,SebastienGllmt/jenkins,daniel-beck/jenkins,evernat/jenkins,shahharsh/jenkins,jenkinsci/jenkins,Ykus/jenkins,jzjzjzj/jenkins,azweb76/jenkins,1and1/jenkins,DanielWeber/jenkins,ydubreuil/jenkins,mattclark/jenkins,rsandell/jenkins,FarmGeek4Life/jenkins,fbelzunc/jenkins,maikeffi/hudson,NehemiahMi/jenkins,DoctorQ/jenkins,lvotypko/jenkins,protazy/jenkins,ydubreuil/jenkins,olivergondza/jenkins,rlugojr/jenkins,ndeloof/jenkins,stephenc/jenkins,wuwen5/jenkins,sathiya-mit/jenkins,Wilfred/jenkins,scoheb/jenkins,shahharsh/jenkins,mpeltonen/jenkins,yonglehou/jenkins,dennisjlee/jenkins,amruthsoft9/Jenkis,jenkinsci/jenkins,SebastienGllmt/jenkins,6WIND/jenkins,jpbriend/jenkins,AustinKwang/jenkins,arcivanov/jenkins,vijayto/jenkins,DoctorQ/jenkins,paulwellnerbou/jenkins,thomassuckow/jenkins,hudson/hudson-2.x,paulwellnerbou/jenkins,rsandell/jenkins,daniel-beck/jenkins,thomassuckow/jenkins,albers/jenkins,jhoblitt/jenkins,liorhson/jenkins,CodeShane/jenkins,paulwellnerbou/jenkins,vvv444/jenkins,mrooney/jenkins,Wilfred/jenkins,jpederzolli/jenkins-1,petermarcoen/jenkins,guoxu0514/jenkins,DanielWeber/jenkins,ErikVerheul/jenkins,pjanouse/jenkins,guoxu0514/jenkins,noikiy/jenkins,ErikVerheul/jenkins,goldchang/jenkins,1and1/jenkins,liorhson/jenkins,nandan4/Jenkins,tfennelly/jenkins,dariver/jenkins,patbos/jenkins,akshayabd/jenkins,liupugong/jenkins,vvv444/jenkins,duzifang/my-jenkins,vlajos/jenkins,ikedam/jenkins,daspilker/jenkins,vjuranek/jenkins,damianszczepanik/jenkins,arunsingh/jenkins,dbroady1/jenkins,batmat/jenkins,v1v/jenkins,lvotypko/jenkins2,nandan4/Jenkins,gitaccountforprashant/gittest,vlajos/jenkins,singh88/jenkins,lilyJi/jenkins,tastatur/jenkins,dennisjlee/jenkins,protazy/jenkins,aldaris/jenkins,aldaris/jenkins,iterate/coding-dojo,patbos/jenkins,jtnord/jenkins,jcsirot/jenkins,deadmoose/jenkins,
vlajos/jenkins,wangyikai/jenkins,lordofthejars/jenkins,Wilfred/jenkins,recena/jenkins,jzjzjzj/jenkins,DoctorQ/jenkins,dariver/jenkins,mrooney/jenkins,MarkEWaite/jenkins,svanoort/jenkins,gitaccountforprashant/gittest,keyurpatankar/hudson,jenkinsci/jenkins,kohsuke/hudson,akshayabd/jenkins,arcivanov/jenkins,FTG-003/jenkins,scoheb/jenkins,SenolOzer/jenkins,tfennelly/jenkins,azweb76/jenkins,tastatur/jenkins,samatdav/jenkins,jcarrothers-sap/jenkins,soenter/jenkins,ajshastri/jenkins,singh88/jenkins,vivek/hudson,iqstack/jenkins,CodeShane/jenkins,lvotypko/jenkins,arunsingh/jenkins,ajshastri/jenkins,FTG-003/jenkins,damianszczepanik/jenkins,mrobinet/jenkins,iterate/coding-dojo,varmenise/jenkins,kzantow/jenkins,lvotypko/jenkins2,h4ck3rm1k3/jenkins,jcsirot/jenkins,jtnord/jenkins,tastatur/jenkins,alvarolobato/jenkins,kzantow/jenkins,christ66/jenkins,recena/jenkins,SenolOzer/jenkins,jcsirot/jenkins,pantheon-systems/jenkins,tangkun75/jenkins,ndeloof/jenkins,alvarolobato/jenkins,vvv444/jenkins,ydubreuil/jenkins,albers/jenkins,mpeltonen/jenkins,azweb76/jenkins,akshayabd/jenkins,liupugong/jenkins,hemantojhaa/jenkins,msrb/jenkins,abayer/jenkins,lilyJi/jenkins,Jimilian/jenkins,github-api-test-org/jenkins,292388900/jenkins,verbitan/jenkins,gusreiber/jenkins,6WIND/jenkins,recena/jenkins,jcarrothers-sap/jenkins,everyonce/jenkins,akshayabd/jenkins,pantheon-systems/jenkins,verbitan/jenkins,arunsingh/jenkins,Vlatombe/jenkins,DoctorQ/jenkins,h4ck3rm1k3/jenkins,kzantow/jenkins,ikedam/jenkins,abayer/jenkins,AustinKwang/jenkins,andresrc/jenkins,github-api-test-org/jenkins,dennisjlee/jenkins,ErikVerheul/jenkins,mpeltonen/jenkins,MadsNielsen/jtemp,jhoblitt/jenkins,chbiel/jenkins,daspilker/jenkins,Jochen-A-Fuerbacher/jenkins,iterate/coding-dojo,jpederzolli/jenkins-1,andresrc/jenkins,daspilker/jenkins,mdonohue/jenkins,viqueen/jenkins,deadmoose/jenkins,v1v/jenkins,hplatou/jenkins,hudson/hudson-2.x,ns163/jenkins,vlajos/jenkins,292388900/jenkins,ErikVerheul/jenkins,everyonce/jenkins,khmarbaise/jenkins,Wilfred/jenkins,noikiy/jenkins,Krasnyanskiy/jenkins,Krasnyanskiy/jenkins,tangkun75/jenkins,synopsys-arc-oss/jenkins,wuwen5/jenkins,paulwellnerbou/jenkins,NehemiahMi/jenkins,stephenc/jenkins,guoxu0514/jenkins,scoheb/jenkins,v1v/jenkins,intelchen/jenkins,brunocvcunha/jenkins,mattclark/jenkins,protazy/jenkins,tfennelly/jenkins,bpzhang/jenkins,DoctorQ/jenkins,oleg-nenashev/jenkins,sathiya-mit/jenkins,FTG-003/jenkins,rsandell/jenkins,seanlin816/jenkins,gorcz/jenkins,Krasnyanskiy/jenkins,svanoort/jenkins,jcsirot/jenkins,varmenise/jenkins,ErikVerheul/jenkins,evernat/jenkins,gorcz/jenkins,MichaelPranovich/jenkins_sc,khmarbaise/jenkins,lvotypko/jenkins2,pjanouse/jenkins,hemantojhaa/jenkins,h4ck3rm1k3/jenkins,hashar/jenkins,Vlatombe/jenkins,aquarellian/jenkins,aheritier/jenkins,stephenc/jenkins,brunocvcunha/jenkins,hashar/jenkins,DanielWeber/jenkins,lvotypko/jenkins,scoheb/jenkins,jcarrothers-sap/jenkins,wangyikai/jenkins,liupugong/jenkins,pantheon-systems/jenkins,gorcz/jenkins,paulmillar/jenkins,liupugong/jenkins,lindzh/jenkins,github-api-test-org/jenkins,h4ck3rm1k3/jenkins,lvotypko/jenkins,vijayto/jenkins,goldchang/jenkins,verbitan/jenkins,escoem/jenkins,soenter/jenkins,svanoort/jenkins,stefanbrausch/hudson-main,jk47/jenkins,csimons/jenkins,stefanbrausch/hudson-main,akshayabd/jenkins,elkingtonmcb/jenkins,shahharsh/jenkins,pselle/jenkins,albers/jenkins,evernat/jenkins,hashar/jenkins,stefanbrausch/hudson-main,ns163/jenkins,huybrechts/hudson,lordofthejars/jenkins,iqstack/jenkins,olivergondza/jenkins,intelchen/jenkins,alvarolobato/jenkins,mca
nthony/jenkins,hemantojhaa/jenkins,NehemiahMi/jenkins,ajshastri/jenkins,Jochen-A-Fuerbacher/jenkins,ErikVerheul/jenkins,jpederzolli/jenkins-1,recena/jenkins,KostyaSha/jenkins,elkingtonmcb/jenkins,ns163/jenkins,bkmeneguello/jenkins,ikedam/jenkins,ajshastri/jenkins,rsandell/jenkins,gitaccountforprashant/gittest,noikiy/jenkins,rsandell/jenkins,amuniz/jenkins,bpzhang/jenkins,hudson/hudson-2.x,Ykus/jenkins,paulwellnerbou/jenkins,svanoort/jenkins,oleg-nenashev/jenkins,huybrechts/hudson,liupugong/jenkins,jzjzjzj/jenkins,jpederzolli/jenkins-1,csimons/jenkins,jk47/jenkins,v1v/jenkins,ydubreuil/jenkins,aheritier/jenkins,Vlatombe/jenkins,SebastienGllmt/jenkins,vjuranek/jenkins,SebastienGllmt/jenkins,luoqii/jenkins,intelchen/jenkins,everyonce/jenkins,evernat/jenkins,fbelzunc/jenkins,292388900/jenkins,ajshastri/jenkins,stefanbrausch/hudson-main,jk47/jenkins,jpbriend/jenkins,fbelzunc/jenkins,stephenc/jenkins,protazy/jenkins,Jochen-A-Fuerbacher/jenkins,maikeffi/hudson,kohsuke/hudson,soenter/jenkins,andresrc/jenkins,iterate/coding-dojo,deadmoose/jenkins,NehemiahMi/jenkins,amuniz/jenkins,my7seven/jenkins,olivergondza/jenkins,petermarcoen/jenkins,nandan4/Jenkins,Jimilian/jenkins,wangyikai/jenkins,intelchen/jenkins,mattclark/jenkins,rashmikanta-1984/jenkins,amruthsoft9/Jenkis,pselle/jenkins,bkmeneguello/jenkins,vivek/hudson,mattclark/jenkins,mrooney/jenkins,DoctorQ/jenkins,jtnord/jenkins,hudson/hudson-2.x,1and1/jenkins,1and1/jenkins,my7seven/jenkins,vivek/hudson,lilyJi/jenkins,hemantojhaa/jenkins,hplatou/jenkins,vjuranek/jenkins,jhoblitt/jenkins,Jimilian/jenkins,jpbriend/jenkins,daniel-beck/jenkins,morficus/jenkins,guoxu0514/jenkins,morficus/jenkins,abayer/jenkins,protazy/jenkins,azweb76/jenkins,ChrisA89/jenkins,olivergondza/jenkins,tangkun75/jenkins,SenolOzer/jenkins,NehemiahMi/jenkins,aldaris/jenkins,ikedam/jenkins,SenolOzer/jenkins,recena/jenkins,paulwellnerbou/jenkins,Krasnyanskiy/jenkins,pselle/jenkins,MadsNielsen/jtemp,samatdav/jenkins,pjanouse/jenkins,jcarrothers-sap/jenkins,csimons/jenkins,CodeShane/jenkins,Ykus/jenkins,aquarellian/jenkins,kohsuke/hudson,jzjzjzj/jenkins,arcivanov/jenkins,jglick/jenkins,petermarcoen/jenkins,lordofthejars/jenkins,damianszczepanik/jenkins,rashmikanta-1984/jenkins,damianszczepanik/jenkins,escoem/jenkins,jzjzjzj/jenkins,kzantow/jenkins,aduprat/jenkins,nandan4/Jenkins,arcivanov/jenkins,vvv444/jenkins,lordofthejars/jenkins,christ66/jenkins,Wilfred/jenkins,varmenise/jenkins,goldchang/jenkins,SebastienGllmt/jenkins,chbiel/jenkins,alvarolobato/jenkins,hplatou/jenkins,svanoort/jenkins,kzantow/jenkins,abayer/jenkins,olivergondza/jenkins,hplatou/jenkins,Jochen-A-Fuerbacher/jenkins,jtnord/jenkins,duzifang/my-jenkins,varmenise/jenkins,mattclark/jenkins,h4ck3rm1k3/jenkins,pselle/jenkins,kohsuke/hudson,daniel-beck/jenkins,lvotypko/jenkins3,dennisjlee/jenkins,mdonohue/jenkins,varmenise/jenkins,seanlin816/jenkins,khmarbaise/jenkins,lindzh/jenkins,mattclark/jenkins,rsandell/jenkins,maikeffi/hudson,CodeShane/jenkins,evernat/jenkins,FTG-003/jenkins,KostyaSha/jenkins,ikedam/jenkins,oleg-nenashev/jenkins,fbelzunc/jenkins,mpeltonen/jenkins,seanlin816/jenkins,bkmeneguello/jenkins,hplatou/jenkins,DanielWeber/jenkins,seanlin816/jenkins,DanielWeber/jenkins,lindzh/jenkins,aduprat/jenkins,aldaris/jenkins,vjuranek/jenkins,daspilker/jenkins,daspilker/jenkins,lvotypko/jenkins2,vivek/hudson,MadsNielsen/jtemp,vijayto/jenkins,dariver/jenkins,escoem/jenkins,jenkinsci/jenkins,sathiya-mit/jenkins,github-api-test-org/jenkins,jcsirot/jenkins,jzjzjzj/jenkins,Jimilian/jenkins,gusreiber/jenkins,samatdav/jenkins,
liorhson/jenkins,jpbriend/jenkins,alvarolobato/jenkins,ndeloof/jenkins,amruthsoft9/Jenkis,aquarellian/jenkins,tastatur/jenkins,Wilfred/jenkins,abayer/jenkins,tangkun75/jenkins,DoctorQ/jenkins,nandan4/Jenkins,MichaelPranovich/jenkins_sc,jcarrothers-sap/jenkins,liorhson/jenkins,hudson/hudson-2.x,vijayto/jenkins,msrb/jenkins,varmenise/jenkins,gitaccountforprashant/gittest,pselle/jenkins,kzantow/jenkins,batmat/jenkins,ChrisA89/jenkins,ajshastri/jenkins,jglick/jenkins,bpzhang/jenkins,aquarellian/jenkins,sathiya-mit/jenkins,pantheon-systems/jenkins,bpzhang/jenkins,msrb/jenkins,aduprat/jenkins,aheritier/jenkins,huybrechts/hudson,jglick/jenkins,FTG-003/jenkins,mdonohue/jenkins,gorcz/jenkins,batmat/jenkins,maikeffi/hudson,rashmikanta-1984/jenkins,hudson/hudson-2.x,MadsNielsen/jtemp,mrooney/jenkins,brunocvcunha/jenkins,wuwen5/jenkins,jk47/jenkins,oleg-nenashev/jenkins,gusreiber/jenkins,verbitan/jenkins,olivergondza/jenkins,vvv444/jenkins,FarmGeek4Life/jenkins,tastatur/jenkins,elkingtonmcb/jenkins,evernat/jenkins,jpbriend/jenkins,ndeloof/jenkins,SebastienGllmt/jenkins,FarmGeek4Life/jenkins,azweb76/jenkins,gusreiber/jenkins,dennisjlee/jenkins,mcanthony/jenkins,iqstack/jenkins,tangkun75/jenkins,noikiy/jenkins,everyonce/jenkins,elkingtonmcb/jenkins,ydubreuil/jenkins,andresrc/jenkins,jzjzjzj/jenkins,morficus/jenkins,mdonohue/jenkins,noikiy/jenkins,luoqii/jenkins,aquarellian/jenkins,petermarcoen/jenkins,ns163/jenkins,aheritier/jenkins,rashmikanta-1984/jenkins,jglick/jenkins,rlugojr/jenkins,alvarolobato/jenkins,lilyJi/jenkins,msrb/jenkins,singh88/jenkins,amruthsoft9/Jenkis,mdonohue/jenkins,scoheb/jenkins,Krasnyanskiy/jenkins,gorcz/jenkins,KostyaSha/jenkins,hemantojhaa/jenkins,kohsuke/hudson,deadmoose/jenkins,ndeloof/jenkins,tangkun75/jenkins,chbiel/jenkins,yonglehou/jenkins,synopsys-arc-oss/jenkins,MarkEWaite/jenkins,KostyaSha/jenkins,rlugojr/jenkins,amuniz/jenkins,iqstack/jenkins,arunsingh/jenkins,aldaris/jenkins,FTG-003/jenkins,FarmGeek4Life/jenkins,ChrisA89/jenkins,bpzhang/jenkins | package hudson;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.recipes.WithPlugin;
import org.apache.commons.io.FileUtils;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlButton;
import java.io.File;
/**
* @author Kohsuke Kawaguchi
*/
public class PluginManagerTest extends HudsonTestCase {
/**
* Manual submission form.
*/
public void testUpload() throws Exception {
HtmlPage page = new WebClient().goTo("pluginManager/advanced");
HtmlForm f = page.getFormByName("uploadPlugin");
File dir = env.temporaryDirectoryAllocator.allocate();
File plugin = new File(dir, "tasks.hpi");
FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/tasks.hpi"),plugin);
f.getInputByName("name").setValueAttribute(plugin.getAbsolutePath());
f.submit((HtmlButton)last(f.getHtmlElementsByTagName("button")));
assertTrue( new File(hudson.getRootDir(),"plugins/tasks.hpi").exists() );
}
/**
* Tests the effect of {@link WithPlugin}.
*/
@WithPlugin("tasks.hpi")
public void testWithRecipe() throws Exception {
assertNotNull(hudson.getPlugin("tasks"));
}
}
| test/src/test/java/hudson/PluginManagerTest.java | package hudson;
import org.jvnet.hudson.test.HudsonTestCase;
import org.jvnet.hudson.test.recipes.WithPlugin;
import org.apache.commons.io.FileUtils;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlButton;
import java.io.File;
/**
* @author Kohsuke Kawaguchi
*/
public class PluginManagerTest extends HudsonTestCase {
/**
* Manual submission form.
*/
public void testUpload() throws Exception {
HtmlPage page = new WebClient().goTo("pluginManager/advanced");
HtmlForm f = page.getFormByName("uploadPlugin");
File dir = env.temporaryDirectoryAllocator.allocate();
File plugin = new File(dir, "tasks.hpi");
FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/tasks.hpi"),plugin);
f.getInputByName("name").setValueAttribute(plugin.getAbsolutePath());
f.submit((HtmlButton)last(f.getHtmlElementsByTagName("button")));
assertTrue( new File(hudson.getRootDir(),"plugins/tasks.hpi").exists() );
}
@WithPlugin("tasks.hpi")
public void testWithRecipe() throws Exception {
assertNotNull(hudson.getPlugin("tasks"));
}
}
| doc improvement
git-svn-id: 28f34f9aa52bc55a5ddd5be9e183c5cccadc6ee4@13934 71c3de6d-444a-0410-be80-ed276b4c234a
| test/src/test/java/hudson/PluginManagerTest.java | doc improvement | <ide><path>est/src/test/java/hudson/PluginManagerTest.java
<ide> assertTrue( new File(hudson.getRootDir(),"plugins/tasks.hpi").exists() );
<ide> }
<ide>
<add> /**
<add> * Tests the effect of {@link WithPlugin}.
<add> */
<ide> @WithPlugin("tasks.hpi")
<ide> public void testWithRecipe() throws Exception {
<ide> assertNotNull(hudson.getPlugin("tasks")); |
|
Java | apache-2.0 | 82f4d5467d657a6555a953a8aa503e8da4412118 | 0 | liaozhoubei/NewWeather | package com.bei.newweather;
import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;
import android.text.format.Time;
import com.bei.newweather.sync.SunshineSyncAdapter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
/**
* Created by Administrator on 2016/10/14.
*/
public class Utility {
public static String getPreferredLocation(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
return prefs.getString(context.getString(R.string.pref_location_key),
context.getString(R.string.pref_location_default));
}
public static boolean isMetric(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
return prefs.getString(context.getString(R.string.pref_units_key),
context.getString(R.string.pref_units_metric))
.equals(context.getString(R.string.pref_units_metric));
}
public static String formatTemperature(Context context, double temperature) {
// Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert
// the values here.
String suffix = "\u00B0";
if (!isMetric(context)) {
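            // Celsius to Fahrenheit: F = C * 9/5 + 32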
temperature = (temperature * 1.8) + 32;
}
// For presentation, assume the user doesn't care about tenths of a degree.
return String.format(context.getString(R.string.format_temperature), temperature);
}
static String formatDate(long dateInMilliseconds) {
Date date = new Date(dateInMilliseconds);
return DateFormat.getDateInstance().format(date);
}
    // Format used for storing dates in the database. Also used for converting those strings
// back into date objects for comparison/processing.
public static final String DATE_FORMAT = "yyyyMMdd";
/**
* Helper method to convert the database representation of the date into something to display
* to users. As classy and polished a user experience as "20140102" is, we can do better.
*
* @param context Context to use for resource localization
* @param dateInMillis The date in milliseconds
* @return a user-friendly representation of the date.
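     *         (for example, a date falling on the current day is returned as "Today, June 24")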
*/
public static String getFriendlyDayString(Context context, long dateInMillis) {
// The day string for forecast uses the following logic:
// For today: "Today, June 8"
// For tomorrow: "Tomorrow"
// For the next 5 days: "Wednesday" (just the day name)
// For all days after that: "Mon Jun 8"
Time time = new Time();
time.setToNow();
long currentTime = System.currentTimeMillis();
int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff);
int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff);
// If the date we're building the String for is today's date, the format
// is "Today, June 24"
if (julianDay == currentJulianDay) {
String today = context.getString(R.string.today);
int formatId = R.string.format_full_friendly_date;
return context.getString(
formatId,
today,
getFormattedMonthDay(context, dateInMillis));
} else if ( julianDay < currentJulianDay + 7 ) {
// If the input date is less than a week in the future, just return the day name.
return getDayName(context, dateInMillis);
} else {
// Otherwise, use the form "Mon Jun 3"
SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
return shortenedDateFormat.format(dateInMillis);
}
}
/**
* Given a day, returns just the name to use for that day.
* E.g "today", "tomorrow", "wednesday".
*
* @param context Context to use for resource localization
* @param dateInMillis The date in milliseconds
     * @return the localized day name, e.g. "Today", "Tomorrow", or the day of the week
*/
public static String getDayName(Context context, long dateInMillis) {
// If the date is today, return the localized version of "Today" instead of the actual
// day name.
Time t = new Time();
t.setToNow();
int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff);
int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
if (julianDay == currentJulianDay) {
return context.getString(R.string.today);
} else if ( julianDay == currentJulianDay +1 ) {
return context.getString(R.string.tomorrow);
} else {
Time time = new Time();
time.setToNow();
            // Otherwise, the format is just the day of the week (e.g. "Wednesday").
SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE");
return dayFormat.format(dateInMillis);
}
}
/**
* Converts db date format to the format "Month day", e.g "June 24".
* @param context Context to use for resource localization
* @param dateInMillis The db formatted date string, expected to be of the form specified
* in Utility.DATE_FORMAT
* @return The day in the form of a string formatted "December 6"
*/
public static String getFormattedMonthDay(Context context, long dateInMillis ) {
Time time = new Time();
time.setToNow();
SimpleDateFormat dbDateFormat = new SimpleDateFormat(Utility.DATE_FORMAT);
SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd");
String monthDayString = monthDayFormat.format(dateInMillis);
return monthDayString;
}
public static String getFormattedWind(Context context, float windSpeed, float degrees) {
int windFormat;
if (Utility.isMetric(context)) {
windFormat = R.string.format_wind_kmh;
} else {
windFormat = R.string.format_wind_mph;
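            // convert km/h to mph (1 km/h is about 0.621371 mph)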
windSpeed = .621371192237334f * windSpeed;
}
// From wind direction in degrees, determine compass direction as a string (e.g NW)
// You know what's fun, writing really long if/else statements with tons of possible
// conditions. Seriously, try it!
String direction = "Unknown";
if (degrees >= 337.5 || degrees < 22.5) {
direction = "N";
} else if (degrees >= 22.5 && degrees < 67.5) {
direction = "NE";
} else if (degrees >= 67.5 && degrees < 112.5) {
direction = "E";
} else if (degrees >= 112.5 && degrees < 157.5) {
direction = "SE";
} else if (degrees >= 157.5 && degrees < 202.5) {
direction = "S";
} else if (degrees >= 202.5 && degrees < 247.5) {
direction = "SW";
} else if (degrees >= 247.5 && degrees < 292.5) {
direction = "W";
} else if (degrees >= 292.5 && degrees < 337.5) {
direction = "NW";
}
return String.format(context.getString(windFormat), windSpeed, direction);
}
/**
* Helper method to provide the icon resource id according to the weather condition id returned
* by the OpenWeatherMap call.
* @param weatherId from OpenWeatherMap API response
* @return resource id for the corresponding icon. -1 if no relation is found.
*/
public static int getIconResourceForWeatherCondition(int weatherId) {
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
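        // condition ids are grouped by OpenWeatherMap ranges: 2xx storms, 3xx drizzle, 5xx rain, 6xx snow, 7xx atmosphere, 800 clear, 80x clouds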
if (weatherId >= 200 && weatherId <= 232) {
return R.drawable.ic_storm;
} else if (weatherId >= 300 && weatherId <= 321) {
return R.drawable.ic_light_rain;
} else if (weatherId >= 500 && weatherId <= 504) {
return R.drawable.ic_rain;
} else if (weatherId == 511) {
return R.drawable.ic_snow;
} else if (weatherId >= 520 && weatherId <= 531) {
return R.drawable.ic_rain;
} else if (weatherId >= 600 && weatherId <= 622) {
return R.drawable.ic_snow;
} else if (weatherId >= 701 && weatherId <= 761) {
return R.drawable.ic_fog;
} else if (weatherId == 761 || weatherId == 781) {
return R.drawable.ic_storm;
} else if (weatherId == 800) {
return R.drawable.ic_clear;
} else if (weatherId == 801) {
return R.drawable.ic_light_clouds;
} else if (weatherId >= 802 && weatherId <= 804) {
return R.drawable.ic_cloudy;
}
return -1;
}
/**
* Helper method to provide the art urls according to the weather condition id returned
* by the OpenWeatherMap call.
*
* @param context Context to use for retrieving the URL format
* @param weatherId from OpenWeatherMap API response
* @return url for the corresponding weather artwork. null if no relation is found.
*/
public static String getArtUrlForWeatherCondition(Context context, int weatherId) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
String formatArtUrl = prefs.getString(context.getString(R.string.pref_art_pack_key),
context.getString(R.string.pref_art_pack_sunshine));
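        // the stored preference is a URL format string; String.format below substitutes the condition keyword (e.g. "storm") into it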
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
if (weatherId >= 200 && weatherId <= 232) {
return String.format(Locale.US, formatArtUrl, "storm");
} else if (weatherId >= 300 && weatherId <= 321) {
return String.format(Locale.US, formatArtUrl, "light_rain");
} else if (weatherId >= 500 && weatherId <= 504) {
return String.format(Locale.US, formatArtUrl, "rain");
} else if (weatherId == 511) {
return String.format(Locale.US, formatArtUrl, "snow");
} else if (weatherId >= 520 && weatherId <= 531) {
return String.format(Locale.US, formatArtUrl, "rain");
} else if (weatherId >= 600 && weatherId <= 622) {
return String.format(Locale.US, formatArtUrl, "snow");
} else if (weatherId >= 701 && weatherId <= 761) {
return String.format(Locale.US, formatArtUrl, "fog");
} else if (weatherId == 761 || weatherId == 781) {
return String.format(Locale.US, formatArtUrl, "storm");
} else if (weatherId == 800) {
return String.format(Locale.US, formatArtUrl, "clear");
} else if (weatherId == 801) {
return String.format(Locale.US, formatArtUrl, "light_clouds");
} else if (weatherId >= 802 && weatherId <= 804) {
return String.format(Locale.US, formatArtUrl, "clouds");
}
return null;
}
/**
* Helper method to provide the art resource id according to the weather condition id returned
* by the OpenWeatherMap call.
* @param weatherId from OpenWeatherMap API response
* @return resource id for the corresponding icon. -1 if no relation is found.
*/
public static int getArtResourceForWeatherCondition(int weatherId) {
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
if (weatherId >= 200 && weatherId <= 232) {
return R.drawable.art_storm;
} else if (weatherId >= 300 && weatherId <= 321) {
return R.drawable.art_light_rain;
} else if (weatherId >= 500 && weatherId <= 504) {
return R.drawable.art_rain;
} else if (weatherId == 511) {
return R.drawable.art_snow;
} else if (weatherId >= 520 && weatherId <= 531) {
return R.drawable.art_rain;
} else if (weatherId >= 600 && weatherId <= 622) {
return R.drawable.art_snow;
} else if (weatherId >= 701 && weatherId <= 761) {
return R.drawable.art_fog;
} else if (weatherId == 761 || weatherId == 781) {
return R.drawable.art_storm;
} else if (weatherId == 800) {
return R.drawable.art_clear;
} else if (weatherId == 801) {
return R.drawable.art_light_clouds;
} else if (weatherId >= 802 && weatherId <= 804) {
return R.drawable.art_clouds;
}
return -1;
}
/**
* Helper method to provide the string according to the weather
* condition id returned by the OpenWeatherMap call.
* @param context Android context
* @param weatherId from OpenWeatherMap API response
* @return string for the weather condition. null if no relation is found.
*/
public static String getStringForWeatherCondition(Context context, int weatherId) {
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
int stringId;
if (weatherId >= 200 && weatherId <= 232) {
stringId = R.string.condition_2xx;
} else if (weatherId >= 300 && weatherId <= 321) {
stringId = R.string.condition_3xx;
} else switch(weatherId) {
case 500:
stringId = R.string.condition_500;
break;
case 501:
stringId = R.string.condition_501;
break;
case 502:
stringId = R.string.condition_502;
break;
case 503:
stringId = R.string.condition_503;
break;
case 504:
stringId = R.string.condition_504;
break;
case 511:
stringId = R.string.condition_511;
break;
case 520:
stringId = R.string.condition_520;
break;
case 531:
stringId = R.string.condition_531;
break;
case 600:
stringId = R.string.condition_600;
break;
case 601:
stringId = R.string.condition_601;
break;
case 602:
stringId = R.string.condition_602;
break;
case 611:
stringId = R.string.condition_611;
break;
case 612:
stringId = R.string.condition_612;
break;
case 615:
stringId = R.string.condition_615;
break;
case 616:
stringId = R.string.condition_616;
break;
case 620:
stringId = R.string.condition_620;
break;
case 621:
stringId = R.string.condition_621;
break;
case 622:
stringId = R.string.condition_622;
break;
case 701:
stringId = R.string.condition_701;
break;
case 711:
stringId = R.string.condition_711;
break;
case 721:
stringId = R.string.condition_721;
break;
case 731:
stringId = R.string.condition_731;
break;
case 741:
stringId = R.string.condition_741;
break;
case 751:
stringId = R.string.condition_751;
break;
case 761:
stringId = R.string.condition_761;
break;
case 762:
stringId = R.string.condition_762;
break;
case 771:
stringId = R.string.condition_771;
break;
case 781:
stringId = R.string.condition_781;
break;
case 800:
stringId = R.string.condition_800;
break;
case 801:
stringId = R.string.condition_801;
break;
case 802:
stringId = R.string.condition_802;
break;
case 803:
stringId = R.string.condition_803;
break;
case 804:
stringId = R.string.condition_804;
break;
case 900:
stringId = R.string.condition_900;
break;
case 901:
stringId = R.string.condition_901;
break;
case 902:
stringId = R.string.condition_902;
break;
case 903:
stringId = R.string.condition_903;
break;
case 904:
stringId = R.string.condition_904;
break;
case 905:
stringId = R.string.condition_905;
break;
case 906:
stringId = R.string.condition_906;
break;
case 951:
stringId = R.string.condition_951;
break;
case 952:
stringId = R.string.condition_952;
break;
case 953:
stringId = R.string.condition_953;
break;
case 954:
stringId = R.string.condition_954;
break;
case 955:
stringId = R.string.condition_955;
break;
case 956:
stringId = R.string.condition_956;
break;
case 957:
stringId = R.string.condition_957;
break;
case 958:
stringId = R.string.condition_958;
break;
case 959:
stringId = R.string.condition_959;
break;
case 960:
stringId = R.string.condition_960;
break;
case 961:
stringId = R.string.condition_961;
break;
case 962:
stringId = R.string.condition_962;
break;
default:
return context.getString(R.string.condition_unknown, weatherId);
}
return context.getString(stringId);
}
/**
* Returns true if the network is available or about to become available.
*
* @param c Context used to get the ConnectivityManager
* @return true if the network is available
*/
static public boolean isNetworkAvailable(Context c) {
ConnectivityManager cm =
(ConnectivityManager)c.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
return activeNetwork != null &&
activeNetwork.isConnectedOrConnecting();
}
/**
     * Returns the location status last written to SharedPreferences.
* @param c Context used to get the SharedPreferences
* @return the location status integer type
*/
@SuppressWarnings("ResourceType")
static public @SunshineSyncAdapter.LocationStatus
int getLocationStatus(Context c){
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(c);
return sp.getInt(c.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
}
/**
* Resets the location status. (Sets it to SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN)
* @param c Context used to get the SharedPreferences
*/
static public void resetLocationStatus(Context c){
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(c);
SharedPreferences.Editor spe = sp.edit();
spe.putInt(c.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
spe.apply();
}
} | app/src/main/java/com/bei/newweather/Utility.java | package com.bei.newweather;
import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;
import android.text.format.Time;
import com.bei.newweather.sync.SunshineSyncAdapter;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* Created by Administrator on 2016/10/14.
*/
public class Utility {
public static String getPreferredLocation(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
return prefs.getString(context.getString(R.string.pref_location_key),
context.getString(R.string.pref_location_default));
}
public static boolean isMetric(Context context) {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
return prefs.getString(context.getString(R.string.pref_units_key),
context.getString(R.string.pref_units_metric))
.equals(context.getString(R.string.pref_units_metric));
}
public static String formatTemperature(Context context, double temperature) {
// double temp;
// if (!isMetric) {
// temp = 9 * temperature / 5 + 32;
// } else {
// temp = temperature;
// }
// return context.getString(R.string.format_temperature, temp);
// Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert
// the values here.
String suffix = "\u00B0";
if (!isMetric(context)) {
temperature = (temperature * 1.8) + 32;
}
// For presentation, assume the user doesn't care about tenths of a degree.
return String.format(context.getString(R.string.format_temperature), temperature);
}
static String formatDate(long dateInMilliseconds) {
Date date = new Date(dateInMilliseconds);
return DateFormat.getDateInstance().format(date);
}
    // Format used for storing dates in the database. Also used for converting those strings
// back into date objects for comparison/processing.
public static final String DATE_FORMAT = "yyyyMMdd";
/**
* Helper method to convert the database representation of the date into something to display
* to users. As classy and polished a user experience as "20140102" is, we can do better.
*
* @param context Context to use for resource localization
* @param dateInMillis The date in milliseconds
* @return a user-friendly representation of the date.
*/
public static String getFriendlyDayString(Context context, long dateInMillis) {
// The day string for forecast uses the following logic:
// For today: "Today, June 8"
// For tomorrow: "Tomorrow"
// For the next 5 days: "Wednesday" (just the day name)
// For all days after that: "Mon Jun 8"
Time time = new Time();
time.setToNow();
long currentTime = System.currentTimeMillis();
int julianDay = Time.getJulianDay(dateInMillis, time.gmtoff);
int currentJulianDay = Time.getJulianDay(currentTime, time.gmtoff);
// If the date we're building the String for is today's date, the format
// is "Today, June 24"
if (julianDay == currentJulianDay) {
String today = context.getString(R.string.today);
int formatId = R.string.format_full_friendly_date;
return context.getString(
formatId,
today,
getFormattedMonthDay(context, dateInMillis));
} else if (julianDay < currentJulianDay + 7) {
// If the input date is less than a week in the future, just return the day name.
return getDayName(context, dateInMillis);
} else {
// Otherwise, use the form "Mon Jun 3"
SimpleDateFormat shortenedDateFormat = new SimpleDateFormat("EEE MMM dd");
return shortenedDateFormat.format(dateInMillis);
}
}
/**
* Given a day, returns just the name to use for that day.
* E.g "today", "tomorrow", "wednesday".
*
* @param context Context to use for resource localization
* @param dateInMillis The date in milliseconds
     * @return the localized day name, e.g. "Today", "Tomorrow", or the day of the week
*/
public static String getDayName(Context context, long dateInMillis) {
// If the date is today, return the localized version of "Today" instead of the actual
// day name.
Time t = new Time();
t.setToNow();
int julianDay = Time.getJulianDay(dateInMillis, t.gmtoff);
int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
if (julianDay == currentJulianDay) {
return context.getString(R.string.today);
} else if (julianDay == currentJulianDay + 1) {
return context.getString(R.string.tomorrow);
} else {
Time time = new Time();
time.setToNow();
            // Otherwise, the format is just the day of the week (e.g. "Wednesday").
SimpleDateFormat dayFormat = new SimpleDateFormat("EEEE");
return dayFormat.format(dateInMillis);
}
}
/**
* Converts db date format to the format "Month day", e.g "June 24".
*
* @param context Context to use for resource localization
* @param dateInMillis The db formatted date string, expected to be of the form specified
* in Utility.DATE_FORMAT
* @return The day in the form of a string formatted "December 6"
*/
public static String getFormattedMonthDay(Context context, long dateInMillis) {
Time time = new Time();
time.setToNow();
SimpleDateFormat dbDateFormat = new SimpleDateFormat(Utility.DATE_FORMAT);
SimpleDateFormat monthDayFormat = new SimpleDateFormat("MMMM dd");
String monthDayString = monthDayFormat.format(dateInMillis);
return monthDayString;
}
public static String getFormattedWind(Context context, float windSpeed, float degrees) {
int windFormat;
if (Utility.isMetric(context)) {
windFormat = R.string.format_wind_kmh;
} else {
windFormat = R.string.format_wind_mph;
windSpeed = .621371192237334f * windSpeed;
}
// From wind direction in degrees, determine compass direction as a string (e.g NW)
// You know what's fun, writing really long if/else statements with tons of possible
// conditions. Seriously, try it!
String direction = "Unknown";
if (degrees >= 337.5 || degrees < 22.5) {
direction = "N";
} else if (degrees >= 22.5 && degrees < 67.5) {
direction = "NE";
} else if (degrees >= 67.5 && degrees < 112.5) {
direction = "E";
} else if (degrees >= 112.5 && degrees < 157.5) {
direction = "SE";
} else if (degrees >= 157.5 && degrees < 202.5) {
direction = "S";
} else if (degrees >= 202.5 && degrees < 247.5) {
direction = "SW";
} else if (degrees >= 247.5 && degrees < 292.5) {
direction = "W";
} else if (degrees >= 292.5 && degrees < 337.5) {
direction = "NW";
}
return String.format(context.getString(windFormat), windSpeed, direction);
}
/**
* Helper method to provide the icon resource id according to the weather condition id returned
* by the OpenWeatherMap call.
*
* @param weatherId from OpenWeatherMap API response
* @return resource id for the corresponding icon. -1 if no relation is found.
*/
public static int getIconResourceForWeatherCondition(int weatherId) {
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
if (weatherId >= 200 && weatherId <= 232) {
return R.drawable.ic_storm;
} else if (weatherId >= 300 && weatherId <= 321) {
return R.drawable.ic_light_rain;
} else if (weatherId >= 500 && weatherId <= 504) {
return R.drawable.ic_rain;
} else if (weatherId == 511) {
return R.drawable.ic_snow;
} else if (weatherId >= 520 && weatherId <= 531) {
return R.drawable.ic_rain;
} else if (weatherId >= 600 && weatherId <= 622) {
return R.drawable.ic_snow;
} else if (weatherId >= 701 && weatherId <= 761) {
return R.drawable.ic_fog;
} else if (weatherId == 761 || weatherId == 781) {
return R.drawable.ic_storm;
} else if (weatherId == 800) {
return R.drawable.ic_clear;
} else if (weatherId == 801) {
return R.drawable.ic_light_clouds;
} else if (weatherId >= 802 && weatherId <= 804) {
return R.drawable.ic_cloudy;
}
return -1;
}
/**
* Helper method to provide the art resource id according to the weather condition id returned
* by the OpenWeatherMap call.
*
* @param weatherId from OpenWeatherMap API response
* @return resource id for the corresponding icon. -1 if no relation is found.
*/
public static int getArtResourceForWeatherCondition(int weatherId) {
// Based on weather code data found at:
// http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
if (weatherId >= 200 && weatherId <= 232) {
return R.drawable.art_storm;
} else if (weatherId >= 300 && weatherId <= 321) {
return R.drawable.art_light_rain;
} else if (weatherId >= 500 && weatherId <= 504) {
return R.drawable.art_rain;
} else if (weatherId == 511) {
return R.drawable.art_snow;
} else if (weatherId >= 520 && weatherId <= 531) {
return R.drawable.art_rain;
} else if (weatherId >= 600 && weatherId <= 622) {
return R.drawable.art_snow;
} else if (weatherId >= 701 && weatherId <= 761) {
return R.drawable.art_fog;
} else if (weatherId == 761 || weatherId == 781) {
return R.drawable.art_storm;
} else if (weatherId == 800) {
return R.drawable.art_clear;
} else if (weatherId == 801) {
return R.drawable.art_light_clouds;
} else if (weatherId >= 802 && weatherId <= 804) {
return R.drawable.art_clouds;
}
return -1;
}
/**
* Returns true if the network is available or about to become available.
*
* @param context Context used to get the ConnectivityManager
* @return true if the network is available
*/
public static boolean isNetworkAvailable(Context context) {
ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetwork = connectivityManager.getActiveNetworkInfo();
return activeNetwork != null && activeNetwork.isConnectedOrConnecting();
}
/**
     * Returns the location status last written to SharedPreferences.
* @param context Context used to get the SharedPreferences
* @return the location status integer type
*/
@SuppressWarnings("ResourceType")
public static @SunshineSyncAdapter.LocationStatus
int getLocationStatus(Context context){
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
return sp.getInt(context.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
}
/**
* Resets the location status. (Sets it to SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN)
* @param c Context used to get the SharedPreferences
*/
static public void resetLocationStatus(Context c){
SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(c);
SharedPreferences.Editor spe = sp.edit();
spe.putInt(c.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
spe.apply();
}
}
| add custom View
| app/src/main/java/com/bei/newweather/Utility.java | add custom View | <ide><path>pp/src/main/java/com/bei/newweather/Utility.java
<ide> import java.text.DateFormat;
<ide> import java.text.SimpleDateFormat;
<ide> import java.util.Date;
<add>import java.util.Locale;
<ide>
<ide> /**
<ide> * Created by Administrator on 2016/10/14.
<ide> }
<ide>
<ide> public static String formatTemperature(Context context, double temperature) {
<del>// double temp;
<del>// if (!isMetric) {
<del>// temp = 9 * temperature / 5 + 32;
<del>// } else {
<del>// temp = temperature;
<del>// }
<del>// return context.getString(R.string.format_temperature, temp);
<del>
<ide> // Data stored in Celsius by default. If user prefers to see in Fahrenheit, convert
<ide> // the values here.
<ide> String suffix = "\u00B0";
<ide> * Helper method to convert the database representation of the date into something to display
<ide> * to users. As classy and polished a user experience as "20140102" is, we can do better.
<ide> *
<del> * @param context Context to use for resource localization
<add> * @param context Context to use for resource localization
<ide> * @param dateInMillis The date in milliseconds
<ide> * @return a user-friendly representation of the date.
<ide> */
<ide> formatId,
<ide> today,
<ide> getFormattedMonthDay(context, dateInMillis));
<del> } else if (julianDay < currentJulianDay + 7) {
<add> } else if ( julianDay < currentJulianDay + 7 ) {
<ide> // If the input date is less than a week in the future, just return the day name.
<ide> return getDayName(context, dateInMillis);
<ide> } else {
<ide> * Given a day, returns just the name to use for that day.
<ide> * E.g "today", "tomorrow", "wednesday".
<ide> *
<del> * @param context Context to use for resource localization
<add> * @param context Context to use for resource localization
<ide> * @param dateInMillis The date in milliseconds
<ide> * @return
<ide> */
<ide> int currentJulianDay = Time.getJulianDay(System.currentTimeMillis(), t.gmtoff);
<ide> if (julianDay == currentJulianDay) {
<ide> return context.getString(R.string.today);
<del> } else if (julianDay == currentJulianDay + 1) {
<add> } else if ( julianDay == currentJulianDay +1 ) {
<ide> return context.getString(R.string.tomorrow);
<ide> } else {
<ide> Time time = new Time();
<ide>
<ide> /**
<ide> * Converts db date format to the format "Month day", e.g "June 24".
<del> *
<del> * @param context Context to use for resource localization
<add> * @param context Context to use for resource localization
<ide> * @param dateInMillis The db formatted date string, expected to be of the form specified
<del> * in Utility.DATE_FORMAT
<add> * in Utility.DATE_FORMAT
<ide> * @return The day in the form of a string formatted "December 6"
<ide> */
<del> public static String getFormattedMonthDay(Context context, long dateInMillis) {
<add> public static String getFormattedMonthDay(Context context, long dateInMillis ) {
<ide> Time time = new Time();
<ide> time.setToNow();
<ide> SimpleDateFormat dbDateFormat = new SimpleDateFormat(Utility.DATE_FORMAT);
<ide> String monthDayString = monthDayFormat.format(dateInMillis);
<ide> return monthDayString;
<ide> }
<del>
<ide>
<ide> public static String getFormattedWind(Context context, float windSpeed, float degrees) {
<ide> int windFormat;
<ide> /**
<ide> * Helper method to provide the icon resource id according to the weather condition id returned
<ide> * by the OpenWeatherMap call.
<del> *
<ide> * @param weatherId from OpenWeatherMap API response
<ide> * @return resource id for the corresponding icon. -1 if no relation is found.
<ide> */
<ide> }
<ide>
<ide> /**
<add> * Helper method to provide the art urls according to the weather condition id returned
<add> * by the OpenWeatherMap call.
<add> *
<add> * @param context Context to use for retrieving the URL format
<add> * @param weatherId from OpenWeatherMap API response
<add> * @return url for the corresponding weather artwork. null if no relation is found.
<add> */
<add> public static String getArtUrlForWeatherCondition(Context context, int weatherId) {
<add> SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
<add> String formatArtUrl = prefs.getString(context.getString(R.string.pref_art_pack_key),
<add> context.getString(R.string.pref_art_pack_sunshine));
<add>
<add> // Based on weather code data found at:
<add> // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
<add> if (weatherId >= 200 && weatherId <= 232) {
<add> return String.format(Locale.US, formatArtUrl, "storm");
<add> } else if (weatherId >= 300 && weatherId <= 321) {
<add> return String.format(Locale.US, formatArtUrl, "light_rain");
<add> } else if (weatherId >= 500 && weatherId <= 504) {
<add> return String.format(Locale.US, formatArtUrl, "rain");
<add> } else if (weatherId == 511) {
<add> return String.format(Locale.US, formatArtUrl, "snow");
<add> } else if (weatherId >= 520 && weatherId <= 531) {
<add> return String.format(Locale.US, formatArtUrl, "rain");
<add> } else if (weatherId >= 600 && weatherId <= 622) {
<add> return String.format(Locale.US, formatArtUrl, "snow");
<add> } else if (weatherId >= 701 && weatherId <= 761) {
<add> return String.format(Locale.US, formatArtUrl, "fog");
<add> } else if (weatherId == 761 || weatherId == 781) {
<add> return String.format(Locale.US, formatArtUrl, "storm");
<add> } else if (weatherId == 800) {
<add> return String.format(Locale.US, formatArtUrl, "clear");
<add> } else if (weatherId == 801) {
<add> return String.format(Locale.US, formatArtUrl, "light_clouds");
<add> } else if (weatherId >= 802 && weatherId <= 804) {
<add> return String.format(Locale.US, formatArtUrl, "clouds");
<add> }
<add> return null;
<add> }
<add>
<add> /**
<ide> * Helper method to provide the art resource id according to the weather condition id returned
<ide> * by the OpenWeatherMap call.
<del> *
<ide> * @param weatherId from OpenWeatherMap API response
<ide> * @return resource id for the corresponding icon. -1 if no relation is found.
<ide> */
<ide> }
<ide>
<ide> /**
<add> * Helper method to provide the string according to the weather
<add> * condition id returned by the OpenWeatherMap call.
<add> * @param context Android context
<add> * @param weatherId from OpenWeatherMap API response
<add> * @return string for the weather condition. null if no relation is found.
<add> */
<add> public static String getStringForWeatherCondition(Context context, int weatherId) {
<add> // Based on weather code data found at:
<add> // http://bugs.openweathermap.org/projects/api/wiki/Weather_Condition_Codes
<add> int stringId;
<add> if (weatherId >= 200 && weatherId <= 232) {
<add> stringId = R.string.condition_2xx;
<add> } else if (weatherId >= 300 && weatherId <= 321) {
<add> stringId = R.string.condition_3xx;
<add> } else switch(weatherId) {
<add> case 500:
<add> stringId = R.string.condition_500;
<add> break;
<add> case 501:
<add> stringId = R.string.condition_501;
<add> break;
<add> case 502:
<add> stringId = R.string.condition_502;
<add> break;
<add> case 503:
<add> stringId = R.string.condition_503;
<add> break;
<add> case 504:
<add> stringId = R.string.condition_504;
<add> break;
<add> case 511:
<add> stringId = R.string.condition_511;
<add> break;
<add> case 520:
<add> stringId = R.string.condition_520;
<add> break;
<add> case 531:
<add> stringId = R.string.condition_531;
<add> break;
<add> case 600:
<add> stringId = R.string.condition_600;
<add> break;
<add> case 601:
<add> stringId = R.string.condition_601;
<add> break;
<add> case 602:
<add> stringId = R.string.condition_602;
<add> break;
<add> case 611:
<add> stringId = R.string.condition_611;
<add> break;
<add> case 612:
<add> stringId = R.string.condition_612;
<add> break;
<add> case 615:
<add> stringId = R.string.condition_615;
<add> break;
<add> case 616:
<add> stringId = R.string.condition_616;
<add> break;
<add> case 620:
<add> stringId = R.string.condition_620;
<add> break;
<add> case 621:
<add> stringId = R.string.condition_621;
<add> break;
<add> case 622:
<add> stringId = R.string.condition_622;
<add> break;
<add> case 701:
<add> stringId = R.string.condition_701;
<add> break;
<add> case 711:
<add> stringId = R.string.condition_711;
<add> break;
<add> case 721:
<add> stringId = R.string.condition_721;
<add> break;
<add> case 731:
<add> stringId = R.string.condition_731;
<add> break;
<add> case 741:
<add> stringId = R.string.condition_741;
<add> break;
<add> case 751:
<add> stringId = R.string.condition_751;
<add> break;
<add> case 761:
<add> stringId = R.string.condition_761;
<add> break;
<add> case 762:
<add> stringId = R.string.condition_762;
<add> break;
<add> case 771:
<add> stringId = R.string.condition_771;
<add> break;
<add> case 781:
<add> stringId = R.string.condition_781;
<add> break;
<add> case 800:
<add> stringId = R.string.condition_800;
<add> break;
<add> case 801:
<add> stringId = R.string.condition_801;
<add> break;
<add> case 802:
<add> stringId = R.string.condition_802;
<add> break;
<add> case 803:
<add> stringId = R.string.condition_803;
<add> break;
<add> case 804:
<add> stringId = R.string.condition_804;
<add> break;
<add> case 900:
<add> stringId = R.string.condition_900;
<add> break;
<add> case 901:
<add> stringId = R.string.condition_901;
<add> break;
<add> case 902:
<add> stringId = R.string.condition_902;
<add> break;
<add> case 903:
<add> stringId = R.string.condition_903;
<add> break;
<add> case 904:
<add> stringId = R.string.condition_904;
<add> break;
<add> case 905:
<add> stringId = R.string.condition_905;
<add> break;
<add> case 906:
<add> stringId = R.string.condition_906;
<add> break;
<add> case 951:
<add> stringId = R.string.condition_951;
<add> break;
<add> case 952:
<add> stringId = R.string.condition_952;
<add> break;
<add> case 953:
<add> stringId = R.string.condition_953;
<add> break;
<add> case 954:
<add> stringId = R.string.condition_954;
<add> break;
<add> case 955:
<add> stringId = R.string.condition_955;
<add> break;
<add> case 956:
<add> stringId = R.string.condition_956;
<add> break;
<add> case 957:
<add> stringId = R.string.condition_957;
<add> break;
<add> case 958:
<add> stringId = R.string.condition_958;
<add> break;
<add> case 959:
<add> stringId = R.string.condition_959;
<add> break;
<add> case 960:
<add> stringId = R.string.condition_960;
<add> break;
<add> case 961:
<add> stringId = R.string.condition_961;
<add> break;
<add> case 962:
<add> stringId = R.string.condition_962;
<add> break;
<add> default:
<add> return context.getString(R.string.condition_unknown, weatherId);
<add> }
<add> return context.getString(stringId);
<add> }
<add>
<add> /**
<ide> * Returns true if the network is available or about to become available.
<ide> *
<del> * @param context Context used to get the ConnectivityManager
<del> * @return true if the network is available
<del> */
<del> public static boolean isNetworkAvailable(Context context) {
<del> ConnectivityManager connectivityManager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
<del> NetworkInfo activeNetwork = connectivityManager.getActiveNetworkInfo();
<del> return activeNetwork != null && activeNetwork.isConnectedOrConnecting();
<del> }
<del>
<del> /**
<del> *
<del> * @param context Context used to get the SharedPreferences
<del> * @return the location status integer type
<del> */
<add> * @param c Context used to get the ConnectivityManager
<add> * @return true if the network is available
<add> */
<add> static public boolean isNetworkAvailable(Context c) {
<add> ConnectivityManager cm =
<add> (ConnectivityManager)c.getSystemService(Context.CONNECTIVITY_SERVICE);
<add>
<add> NetworkInfo activeNetwork = cm.getActiveNetworkInfo();
<add> return activeNetwork != null &&
<add> activeNetwork.isConnectedOrConnecting();
<add> }
<add>
<add> /**
<add> *
<add> * @param c Context used to get the SharedPreferences
<add> * @return the location status integer type
<add> */
<ide> @SuppressWarnings("ResourceType")
<del> public static @SunshineSyncAdapter.LocationStatus
<del> int getLocationStatus(Context context){
<del> SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
<del> return sp.getInt(context.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
<add> static public @SunshineSyncAdapter.LocationStatus
<add> int getLocationStatus(Context c){
<add> SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(c);
<add> return sp.getInt(c.getString(R.string.pref_location_status_key), SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN);
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | 21427f5345603096e65357d042f66104d49578d7 | 0 | tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki,tateshitah/jspwiki | /*
JSPWiki - a JSP-based WikiWiki clone.
Copyright (C) 2001-2002 Janne Jalkanen ([email protected])
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.ecyrd.jspwiki.xmlrpc;
import java.io.*;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Category;
import org.apache.xmlrpc.XmlRpcServer;
import com.ecyrd.jspwiki.*;
/**
* Handles all incoming servlet requests for XML-RPC calls.
* <P>
* Uses two initialization parameters:
* <UL>
* <LI><B>handler</B> : the class which is used to handle the RPC calls.
* <LI><B>prefix</B> : The command prefix for that particular handler.
* </UL>
*
* @author Janne Jalkanen
* @since 1.6.6
*/
public class RPCServlet extends HttpServlet
{
/** This is what is appended to each command, if the handler has
not been specified. */
// FIXME: Should this be $default?
public static final String XMLRPC_PREFIX = "wiki";
private WikiEngine m_engine;
private XmlRpcServer m_xmlrpcServer = new XmlRpcServer();
Category log = Category.getInstance( RPCServlet.class );
public void initHandler( String prefix, String handlerName )
throws ClassNotFoundException,
InstantiationException,
IllegalAccessException
{
Class handlerClass = Class.forName( handlerName );
WikiRPCHandler rpchandler = (WikiRPCHandler) handlerClass.newInstance();
rpchandler.initialize( m_engine );
m_xmlrpcServer.addHandler( prefix, rpchandler );
}
/**
* Initializes the servlet.
*/
public void init( ServletConfig config )
throws ServletException
{
m_engine = WikiEngine.getInstance( config );
String handlerName = config.getInitParameter( "handler" );
String prefix = config.getInitParameter( "prefix" );
if( handlerName == null ) handlerName = "com.ecyrd.jspwiki.xmlrpc.RPCHandler";
if( prefix == null ) prefix = XMLRPC_PREFIX;
try
{
initHandler( prefix, handlerName );
//
// FIXME: The metaweblog API should be possible to turn off.
//
initHandler( "metaWeblog",
"com.ecyrd.jspwiki.xmlrpc.MetaWeblogHandler" );
}
catch( Exception e )
{
log.fatal("Unable to start RPC interface: ", e);
throw new ServletException( "No RPC interface", e );
}
}
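    /*
     * Illustrative only -- this deployment descriptor fragment is not taken
     * from the repository. It sketches a typical web.xml registration that
     * feeds the init() method above; the servlet-name and param values are
     * placeholders, while the init-param names "handler" and "prefix" are
     * the ones init() actually reads.
     *
     *   <servlet>
     *     <servlet-name>XML-RPC</servlet-name>
     *     <servlet-class>com.ecyrd.jspwiki.xmlrpc.RPCServlet</servlet-class>
     *     <init-param>
     *       <param-name>handler</param-name>
     *       <param-value>com.ecyrd.jspwiki.xmlrpc.RPCHandler</param-value>
     *     </init-param>
     *     <init-param>
     *       <param-name>prefix</param-name>
     *       <param-value>wiki</param-value>
     *     </init-param>
     *   </servlet>
     */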
/**
* Handle HTTP POST. This is an XML-RPC call, and we'll just forward
* the query to an XmlRpcServer.
*/
public void doPost( HttpServletRequest request, HttpServletResponse response )
throws ServletException
{
log.debug("Received POST to RPCServlet");
try
{
byte[] result = m_xmlrpcServer.execute( request.getInputStream() );
//
// I think it's safe to write the output as UTF-8:
// The XML-RPC standard never creates other than USASCII
// (which is UTF-8 compatible), and our special UTF-8
// hack just creates UTF-8. So in all cases our butt
// should be covered.
//
response.setContentType( "text/xml; charset=utf-8" );
response.setContentLength( result.length );
OutputStream out = response.getOutputStream();
out.write( result );
out.flush();
// log.debug("Result = "+new String(result) );
}
catch( IOException e )
{
throw new ServletException("Failed to build RPC result", e);
}
}
/**
* Handles HTTP GET. However, we do not respond to GET requests,
* other than to show an explanatory text.
*/
public void doGet( HttpServletRequest request, HttpServletResponse response )
throws ServletException
{
log.debug("Received HTTP GET to RPCServlet");
try
{
String msg = "We do not support HTTP GET here. Sorry.";
response.setContentType( "text/plain" );
response.setContentLength( msg.length() );
PrintWriter writer = new PrintWriter( new OutputStreamWriter( response.getOutputStream() ) );
writer.println( msg );
writer.flush();
}
catch( IOException e )
{
throw new ServletException("Failed to build RPC result", e);
}
}
}
| src/com/ecyrd/jspwiki/xmlrpc/RPCServlet.java | /*
JSPWiki - a JSP-based WikiWiki clone.
Copyright (C) 2001-2002 Janne Jalkanen ([email protected])
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package com.ecyrd.jspwiki.xmlrpc;
import java.io.*;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Category;
import org.apache.xmlrpc.XmlRpcServer;
import com.ecyrd.jspwiki.*;
/**
* Handles all incoming servlet requests for XML-RPC calls.
* <P>
* Uses two initialization parameters:
* <UL>
* <LI><B>handler</B> : the class which is used to handle the RPC calls.
* <LI><B>prefix</B> : The command prefix for that particular handler.
* </UL>
*
* @author Janne Jalkanen
* @since 1.6.6
*/
public class RPCServlet extends HttpServlet
{
/** This is what is appended to each command, if the handler has
not been specified. */
// FIXME: Should this be $default?
public static final String XMLRPC_PREFIX = "wiki";
private WikiEngine m_engine;
private XmlRpcServer m_xmlrpcServer = new XmlRpcServer();
Category log = Category.getInstance( RPCServlet.class );
/**
* Initializes the servlet.
*/
public void init( ServletConfig config )
throws ServletException
{
m_engine = WikiEngine.getInstance( config );
String handlerName = config.getInitParameter( "handler" );
String prefix = config.getInitParameter( "prefix" );
if( handlerName == null ) handlerName = "com.ecyrd.jspwiki.RPCHandler";
if( prefix == null ) prefix = XMLRPC_PREFIX;
try
{
Class handlerClass = Class.forName( handlerName );
AbstractRPCHandler rpchandler = (AbstractRPCHandler) handlerClass.newInstance();
rpchandler.initialize( m_engine );
m_xmlrpcServer.addHandler( prefix, rpchandler );
}
catch( Exception e )
{
log.fatal("Unable to start RPC interface: ", e);
throw new ServletException( "No RPC interface", e );
}
}
/**
* Handle HTTP POST. This is an XML-RPC call, and we'll just forward
* the query to an XmlRpcServer.
*/
public void doPost( HttpServletRequest request, HttpServletResponse response )
throws ServletException
{
log.debug("Received POST to RPCServlet");
try
{
byte[] result = m_xmlrpcServer.execute( request.getInputStream() );
//
// I think it's safe to write the output as UTF-8:
// The XML-RPC standard never creates other than USASCII
// (which is UTF-8 compatible), and our special UTF-8
// hack just creates UTF-8. So in all cases our butt
// should be covered.
//
response.setContentType( "text/xml; charset=utf-8" );
response.setContentLength( result.length );
OutputStream out = response.getOutputStream();
out.write( result );
out.flush();
// log.debug("Result = "+new String(result) );
}
catch( IOException e )
{
throw new ServletException("Failed to build RPC result", e);
}
}
/**
* Handles HTTP GET. However, we do not respond to GET requests,
* other than to show an explanatory text.
*/
public void doGet( HttpServletRequest request, HttpServletResponse response )
throws ServletException
{
log.debug("Received HTTP GET to RPCServlet");
try
{
String msg = "We do not support HTTP GET here. Sorry.";
response.setContentType( "text/plain" );
response.setContentLength( msg.length() );
PrintWriter writer = new PrintWriter( new OutputStreamWriter( response.getOutputStream() ) );
writer.println( msg );
writer.flush();
}
catch( IOException e )
{
throw new ServletException("Failed to build RPC result", e);
}
}
}
| Added new initialization model based around WikiRPCHandler.
Added metaWeblogAPI.
git-svn-id: 6c0206e3b9edd104850923da33ebd73b435d374d@622683 13f79535-47bb-0310-9956-ffa450edef68
| src/com/ecyrd/jspwiki/xmlrpc/RPCServlet.java | Added new initialization model based around WikiRPCHandler. Added metaWeblogAPI. | <ide><path>rc/com/ecyrd/jspwiki/xmlrpc/RPCServlet.java
<ide>
<ide> Category log = Category.getInstance( RPCServlet.class );
<ide>
<add> public void initHandler( String prefix, String handlerName )
<add> throws ClassNotFoundException,
<add> InstantiationException,
<add> IllegalAccessException
<add> {
<add> Class handlerClass = Class.forName( handlerName );
<add> WikiRPCHandler rpchandler = (WikiRPCHandler) handlerClass.newInstance();
<add> rpchandler.initialize( m_engine );
<add> m_xmlrpcServer.addHandler( prefix, rpchandler );
<add> }
<add>
<ide> /**
<ide> * Initializes the servlet.
<ide> */
<ide> String handlerName = config.getInitParameter( "handler" );
<ide> String prefix = config.getInitParameter( "prefix" );
<ide>
<del> if( handlerName == null ) handlerName = "com.ecyrd.jspwiki.RPCHandler";
<add> if( handlerName == null ) handlerName = "com.ecyrd.jspwiki.xmlrpc.RPCHandler";
<ide> if( prefix == null ) prefix = XMLRPC_PREFIX;
<ide>
<ide> try
<ide> {
<del> Class handlerClass = Class.forName( handlerName );
<del> AbstractRPCHandler rpchandler = (AbstractRPCHandler) handlerClass.newInstance();
<del> rpchandler.initialize( m_engine );
<del> m_xmlrpcServer.addHandler( prefix, rpchandler );
<add> initHandler( prefix, handlerName );
<add>
<add> //
<add> // FIXME: The metaweblog API should be possible to turn off.
<add> //
<add> initHandler( "metaWeblog",
<add> "com.ecyrd.jspwiki.xmlrpc.MetaWeblogHandler" );
<ide> }
<ide> catch( Exception e )
<ide> { |
|
JavaScript | mit | 755ab7e6c48cf76f534feec22cd5032e9bc36424 | 0 | gszabo/poker-player-happy-friday | 'use strict';
class Player {
constructor() {
this.VERSION = "iPlayer 2.2";
}
bet_request(game_state) {
var currentPlayer = game_state.players[game_state.in_action];
var cards = [].concat(currentPlayer.hole_cards).concat(game_state.community_cards);
var result = cards.reduce(function (previousValue, currentValue) {
if (previousValue[currentValue.rank]) {
previousValue[currentValue.rank]++;
}
else {
previousValue[currentValue.rank] = 1;
}
return previousValue;
}, {});
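        // `result` now maps each card rank seen across our hole cards and the
        // community cards to how many times it occurs; a count above 1 means
        // we hold at least a pair, which drives the all-in decision below.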
var shouldAllIn = false;
Object.keys(result).forEach(function (key) {
if (result[key] > 1) {
shouldAllIn = 1;
}
});
if (game_state.community_cards.length === 0) {
return game_state.current_buy_in - currentPlayer.bet;
}
if (shouldAllIn) {
return 5000;
}
else {
return 0;
}
}
showdown(game_state) {
}
}
module.exports = new Player();
| player.js | 'use strict';
class Player {
constructor() {
this.VERSION = "iPlayer 2.1";
}
bet_request(game_state) {
var currentPlayer = game_state.players[game_state.in_action];
var cards = [].concat(currentPlayer.hole_cards).concat(game_state.community_cards);
var result = cards.reduce(function (previousValue, currentValue) {
if (previousValue[currentValue.rank]) {
previousValue[currentValue.rank]++;
}
else {
previousValue[currentValue.rank] = 1;
}
return previousValue;
}, {});
var shouldAllIn = false;
Object.keys(result).forEach(function (key) {
if (result[key] > 1) {
shouldAllIn = 1;
}
});
if (game_state.community_cards.length === 0) {
return game_state.current_buy_in - currentPlayer.bet;
}
if (shouldAllIn) {
return 5000;
}
else {
return 0;
}
}
showdown(game_state) {
}
}
module.exports = new Player();
| increase version
| player.js | increase version | <ide><path>layer.js
<ide>
<ide> class Player {
<ide> constructor() {
<del> this.VERSION = "iPlayer 2.1";
<add> this.VERSION = "iPlayer 2.2";
<ide> }
<ide>
<ide> bet_request(game_state) { |
|
Java | apache-2.0 | 19c81431363609a7ddc735b61d7912be3d8531ac | 0 | RackerWilliams/xercesj,ronsigal/xerces,ronsigal/xerces,jimma/xerces,jimma/xerces,ronsigal/xerces,RackerWilliams/xercesj,jimma/xerces,RackerWilliams/xercesj | /*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.xerces.dom;
import java.util.Vector;
import org.apache.xerces.framework.XMLAttrList;
import org.apache.xerces.utils.StringPool;
import org.w3c.dom.*;
/**
* The Document interface represents the entire HTML or XML document.
* Conceptually, it is the root of the document tree, and provides the
* primary access to the document's data.
* <P>
* Since elements, text nodes, comments, processing instructions,
* etc. cannot exist outside the context of a Document, the Document
* interface also contains the factory methods needed to create these
* objects. The Node objects created have a ownerDocument attribute
* which associates them with the Document within whose context they
* were created.
*
* @version
* @since PR-DOM-Level-1-19980818.
*/
public class DeferredDocumentImpl
extends DocumentImpl
implements DeferredNode {
//
// Constants
//
/** Serialization version. */
static final long serialVersionUID = 5186323580749626857L;
// debugging
/** To include code for printing the ref count tables. */
private static final boolean DEBUG_PRINT_REF_COUNTS = false;
/** To include code for printing the internal tables. */
private static final boolean DEBUG_PRINT_TABLES = false;
/** To debug identifiers set to true and recompile. */
private static final boolean DEBUG_IDS = false;
// protected
/** Chunk shift. */
protected static final int CHUNK_SHIFT = 11; // 2^11 = 2k
/** Chunk size. */
protected static final int CHUNK_SIZE = (1 << CHUNK_SHIFT);
/** Chunk mask. */
protected static final int CHUNK_MASK = CHUNK_SIZE - 1;
/** Initial chunk size. */
protected static final int INITIAL_CHUNK_COUNT = (1 << (16 - CHUNK_SHIFT)); // 2^16 = 64k
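    // NOTE: node records live in fixed-size chunks rather than one flat array.
    // A node index is split into a chunk number and an offset within that
    // chunk using
    //
    //     int chunk = nodeIndex >> CHUNK_SHIFT;
    //     int index = nodeIndex & CHUNK_MASK;
    //
    // which is the addressing pattern used by the methods below.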
//
// Data
//
// lazy-eval information
/** Node count. */
protected transient int fNodeCount = 0;
/** Node types. */
protected transient int fNodeType[][];
/** Node names. */
protected transient int fNodeName[][];
/** Node values. */
protected transient int fNodeValue[][];
/** Node parents. */
protected transient int fNodeParent[][];
/** Node first children. */
protected transient int fNodeLastChild[][];
/** Node prev siblings. */
protected transient int fNodePrevSib[][];
/** Identifier count. */
protected transient int fIdCount;
/** Identifier name indexes. */
protected transient int fIdName[];
/** Identifier element indexes. */
protected transient int fIdElement[];
/** String pool cache. */
protected transient StringPool fStringPool;
/** DOM2: For namespace support in the deferred case.
*/
// Implementation Note: The deferred element and attribute must know how to
// interpret the int representing the qname.
protected boolean fNamespacesEnabled = false;
//
// Constructors
//
/**
* NON-DOM: Actually creating a Document is outside the DOM's spec,
* since it has to operate in terms of a particular implementation.
*/
public DeferredDocumentImpl(StringPool stringPool) {
this(stringPool, false);
} // <init>(ParserState)
/**
* NON-DOM: Actually creating a Document is outside the DOM's spec,
* since it has to operate in terms of a particular implementation.
*/
public DeferredDocumentImpl(StringPool stringPool, boolean namespacesEnabled) {
this(stringPool, namespacesEnabled, false);
} // <init>(ParserState,boolean)
/** Experimental constructor. */
public DeferredDocumentImpl(StringPool stringPool,
boolean namespaces, boolean grammarAccess) {
super(grammarAccess);
fStringPool = stringPool;
syncData(true);
syncChildren(true);
fNamespacesEnabled = namespaces;
} // <init>(StringPool,boolean,boolean)
//
// Public methods
//
/** Returns the cached parser.getNamespaces() value.*/
boolean getNamespacesEnabled() {
return fNamespacesEnabled;
}
// internal factory methods
/** Creates a document node in the table. */
public int createDocument() {
int nodeIndex = createNode(Node.DOCUMENT_NODE);
return nodeIndex;
}
/** Creates a doctype. */
public int createDocumentType(int rootElementNameIndex, int publicId, int systemId) {
// create node
int nodeIndex = createNode(Node.DOCUMENT_TYPE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// added for DOM2: createDoctype factory method includes
// name, publicID, systemID
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id
setChunkIndex(fNodeName, rootElementNameIndex, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
// return node index
return nodeIndex;
} // createDocumentType(int,int,int):int
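    // NOTE: the "extra data" record created above is never linked into the
    // tree; its name/value slots carry the public and system identifiers, and
    // setInternalSubset() below stores the internal subset index in its
    // last-child slot.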
public void setInternalSubset(int doctypeIndex, int subsetIndex) {
int chunk = doctypeIndex >> CHUNK_SHIFT;
int index = doctypeIndex & CHUNK_MASK;
int extraDataIndex = fNodeValue[chunk][index];
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
fNodeLastChild[echunk][eindex] = subsetIndex;
}
/** Creates a notation in the table. */
public int createNotation(int notationName, int publicId, int systemId) throws Exception {
// create node
int nodeIndex = createNode(Node.NOTATION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id, and notation name
setChunkIndex(fNodeName, notationName, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
// return node index
return nodeIndex;
} // createNotation(int,int,int):int
/** Creates an entity in the table. */
public int createEntity(int entityName, int publicId, int systemId, int notationName) throws Exception {
// create node
int nodeIndex = createNode(Node.ENTITY_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id, and notation name
setChunkIndex(fNodeName, entityName, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
setChunkIndex(fNodeLastChild, notationName, echunk, eindex);
// return node index
return nodeIndex;
} // createEntity(int,int,int,int):int
/** Creates an entity reference node in the table. */
public int createEntityReference(int nameIndex) throws Exception {
// create node
int nodeIndex = createNode(Node.ENTITY_REFERENCE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, nameIndex, chunk, index);
// return node index
return nodeIndex;
} // createEntityReference(int):int
/** Creates an element node in the table. */
public int createElement(int elementNameIndex, XMLAttrList attrList, int attrListIndex) {
// create node
int elementNodeIndex = createNode(Node.ELEMENT_NODE);
int elementChunk = elementNodeIndex >> CHUNK_SHIFT;
int elementIndex = elementNodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, elementNameIndex, elementChunk, elementIndex);
// create attributes
if (attrListIndex != -1) {
int first = attrList.getFirstAttr(attrListIndex);
int lastAttrNodeIndex = -1;
int lastAttrChunk = -1;
int lastAttrIndex = -1;
for (int index = first;
index != -1;
index = attrList.getNextAttr(index)) {
// create attribute
int attrNodeIndex = createAttribute(attrList.getAttrName(index),
attrList.getAttValue(index),
attrList.isSpecified(index));
int attrChunk = attrNodeIndex >> CHUNK_SHIFT;
int attrIndex = attrNodeIndex & CHUNK_MASK;
setChunkIndex(fNodeParent, elementNodeIndex, attrChunk, attrIndex);
// add links
if (index == first) {
setChunkIndex(fNodeValue, attrNodeIndex, elementChunk, elementIndex);
}
else {
setChunkIndex(fNodePrevSib, attrNodeIndex, lastAttrChunk, lastAttrIndex);
}
// save last chunk and index
lastAttrNodeIndex = attrNodeIndex;
lastAttrChunk = attrChunk;
lastAttrIndex = attrIndex;
}
}
// return node index
return elementNodeIndex;
} // createElement(int,XMLAttrList,int):int
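    // NOTE: attributes are not stored as children. The element's value slot
    // heads a singly linked list of attribute records threaded through the
    // prev-sibling slot of each attribute; setAttributeNode() walks and
    // patches that chain.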
/** Creates an attributes in the table. */
public int createAttribute(int attrNameIndex, int attrValueIndex,
boolean specified) {
// create node
int nodeIndex = createNode(NodeImpl.ATTRIBUTE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, attrNameIndex, chunk, index);
setChunkIndex(fNodeValue, specified ? 1 : 0, chunk, index);
// append value as text node
int textNodeIndex = createTextNode(attrValueIndex, false);
appendChild(nodeIndex, textNodeIndex);
// return node index
return nodeIndex;
} // createAttribute(int,int,boolean):int
/** Creates an element definition in the table. */
public int createElementDefinition(int elementNameIndex) {
// create node
int nodeIndex = createNode(NodeImpl.ELEMENT_DEFINITION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, elementNameIndex, chunk, index);
// return node index
return nodeIndex;
} // createElementDefinition(int):int
/** Creates a text node in the table. */
public int createTextNode(int dataIndex, boolean ignorableWhitespace) {
// create node
int nodeIndex = createNode(Node.TEXT_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// use last child to store ignorableWhitespace info
setChunkIndex(fNodeLastChild,
ignorableWhitespace ? 1 : 0, chunk, index);
// return node index
return nodeIndex;
} // createTextNode(int,boolean):int
/** Creates a CDATA section node in the table. */
public int createCDATASection(int dataIndex, boolean ignorableWhitespace) {
// create node
int nodeIndex = createNode(Node.CDATA_SECTION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// use last child to store ignorableWhitespace info
setChunkIndex(fNodeLastChild,
ignorableWhitespace ? 1 : 0, chunk, index);
// return node index
return nodeIndex;
} // createCDATASection(int,boolean):int
/** Creates a processing instruction node in the table. */
public int createProcessingInstruction(int targetIndex, int dataIndex) {
// create node
int nodeIndex = createNode(Node.PROCESSING_INSTRUCTION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, targetIndex, chunk, index);
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// return node index
return nodeIndex;
} // createProcessingInstruction(int,int):int
/** Creates a comment node in the table. */
public int createComment(int dataIndex) {
// create node
int nodeIndex = createNode(Node.COMMENT_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// return node index
return nodeIndex;
} // createComment(int):int
/** Appends a child to the specified parent in the table. */
public void appendChild(int parentIndex, int childIndex) {
// append parent index
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int cchunk = childIndex >> CHUNK_SHIFT;
int cindex = childIndex & CHUNK_MASK;
setChunkIndex(fNodeParent, parentIndex, cchunk, cindex);
// set previous sibling of new child
int olast = getChunkIndex(fNodeLastChild, pchunk, pindex);
setChunkIndex(fNodePrevSib, olast, cchunk, cindex);
// update parent's last child
setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
} // appendChild(int,int)
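    // NOTE: child lists are stored backwards: a parent only remembers its
    // last child, and each child remembers the sibling before it. Forward
    // order is rebuilt when needed, e.g. synchronizeChildren() walks from
    // getLastChild() through getPrevSibling() to link the real node objects
    // first-to-last.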
/** Adds an attribute node to the specified element. */
public int setAttributeNode(int elemIndex, int attrIndex) {
int echunk = elemIndex >> CHUNK_SHIFT;
int eindex = elemIndex & CHUNK_MASK;
int achunk = attrIndex >> CHUNK_SHIFT;
int aindex = attrIndex & CHUNK_MASK;
// see if this attribute is already here
String attrName =
fStringPool.toString(getChunkIndex(fNodeName, achunk, aindex));
int oldAttrIndex = getChunkIndex(fNodeValue, echunk, eindex);
int nextIndex = -1;
int oachunk = -1;
int oaindex = -1;
while (oldAttrIndex != -1) {
oachunk = oldAttrIndex >> CHUNK_SHIFT;
oaindex = oldAttrIndex & CHUNK_MASK;
String oldAttrName =
fStringPool.toString(getChunkIndex(fNodeName, oachunk, oaindex));
if (oldAttrName.equals(attrName)) {
break;
}
nextIndex = oldAttrIndex;
oldAttrIndex = getChunkIndex(fNodePrevSib, oachunk, oaindex);
}
// remove old attribute
if (oldAttrIndex != -1) {
// patch links
int prevIndex = getChunkIndex(fNodePrevSib, oachunk, oaindex);
if (nextIndex == -1) {
setChunkIndex(fNodeValue, prevIndex, echunk, eindex);
}
else {
int pchunk = nextIndex >> CHUNK_SHIFT;
int pindex = nextIndex & CHUNK_MASK;
setChunkIndex(fNodePrevSib, prevIndex, pchunk, pindex);
}
// remove connections to siblings
clearChunkIndex(fNodeType, oachunk, oaindex);
clearChunkIndex(fNodeName, oachunk, oaindex);
clearChunkIndex(fNodeValue, oachunk, oaindex);
clearChunkIndex(fNodeParent, oachunk, oaindex);
clearChunkIndex(fNodePrevSib, oachunk, oaindex);
int attrTextIndex =
clearChunkIndex(fNodeLastChild, oachunk, oaindex);
int atchunk = attrTextIndex >> CHUNK_SHIFT;
int atindex = attrTextIndex & CHUNK_MASK;
clearChunkIndex(fNodeType, atchunk, atindex);
clearChunkIndex(fNodeValue, atchunk, atindex);
clearChunkIndex(fNodeParent, atchunk, atindex);
clearChunkIndex(fNodeLastChild, atchunk, atindex);
}
// add new attribute
int prevIndex = getChunkIndex(fNodeValue, echunk, eindex);
setChunkIndex(fNodeValue, attrIndex, echunk, eindex);
setChunkIndex(fNodePrevSib, prevIndex, achunk, aindex);
// return
return oldAttrIndex;
} // setAttributeNode(int,int):int
/** Inserts a child before the specified node in the table. */
public int insertBefore(int parentIndex, int newChildIndex, int refChildIndex) {
if (refChildIndex == -1) {
appendChild(parentIndex, newChildIndex);
return newChildIndex;
}
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int nchunk = newChildIndex >> CHUNK_SHIFT;
int nindex = newChildIndex & CHUNK_MASK;
int rchunk = refChildIndex >> CHUNK_SHIFT;
int rindex = refChildIndex & CHUNK_MASK;
// 1) if ref.prev != null then
// begin
// new.prev := ref.prev;
// end;
int nextIndex = -1;
int index = -1;
for (index = getChunkIndex(fNodeLastChild, pchunk, pindex);
index != -1;
index = getChunkIndex(fNodePrevSib, index >> CHUNK_SHIFT, index & CHUNK_MASK)) {
if (nextIndex == refChildIndex) {
break;
}
nextIndex = index;
}
if (index != -1) {
int ochunk = index >> CHUNK_SHIFT;
int oindex = index & CHUNK_MASK;
setChunkIndex(fNodePrevSib, newChildIndex, ochunk, oindex);
}
// 2) ref.prev := new;
setChunkIndex(fNodePrevSib, refChildIndex, nchunk, nindex);
return newChildIndex;
} // insertBefore(int,int,int):int
/** Sets the last child of the parentIndex to childIndex. */
public void setAsLastChild(int parentIndex, int childIndex) {
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int chunk = childIndex >> CHUNK_SHIFT;
int index = childIndex & CHUNK_MASK;
setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
} // setAsLastChild(int,int)
/**
* Returns the parent node of the given node.
* <em>Calling this method does not free the parent index.</em>
*/
public int getParentNode(int nodeIndex) {
return getParentNode(nodeIndex, false);
}
/**
* Returns the parent node of the given node.
* @param free True to free parent node.
*/
public int getParentNode(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeParent, chunk, index)
: getChunkIndex(fNodeParent, chunk, index);
    } // getParentNode(int,boolean):int
/** Returns the last child of the given node. */
public int getLastChild(int nodeIndex) {
return getLastChild(nodeIndex, true);
}
/**
* Returns the last child of the given node.
* @param free True to free child index.
*/
public int getLastChild(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeLastChild, chunk, index)
: getChunkIndex(fNodeLastChild, chunk, index);
} // getLastChild(int,boolean):int
/**
* Returns the prev sibling of the given node.
* This is post-normalization of Text Nodes.
*/
public int getPrevSibling(int nodeIndex) {
return getPrevSibling(nodeIndex, true);
}
/**
* Returns the prev sibling of the given node.
* @param free True to free sibling index.
*/
public int getPrevSibling(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
nodeIndex = free ? clearChunkIndex(fNodePrevSib, chunk, index)
: getChunkIndex(fNodePrevSib, chunk, index);
while (nodeIndex != -1 && getChunkIndex(fNodeType, chunk, index) == Node.TEXT_NODE) {
nodeIndex = getChunkIndex(fNodePrevSib, chunk, index);
chunk = nodeIndex >> CHUNK_SHIFT;
index = nodeIndex & CHUNK_MASK;
}
return nodeIndex;
} // getPrevSibling(int,boolean):int
/**
* Returns the <i>real</i> prev sibling of the given node,
* directly from the data structures. Used by TextImpl#getNodeValue()
* to normalize values.
*/
public int getRealPrevSibling(int nodeIndex) {
return getRealPrevSibling(nodeIndex, true);
}
/**
* Returns the <i>real</i> prev sibling of the given node.
* @param free True to free sibling index.
*/
public int getRealPrevSibling(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodePrevSib, chunk, index)
: getChunkIndex(fNodePrevSib, chunk, index);
    } // getRealPrevSibling(int,boolean):int
/**
* Returns the index of the element definition in the table
* with the specified name index, or -1 if no such definition
* exists.
*/
public int lookupElementDefinition(int elementNameIndex) {
if (fNodeCount > 1) {
// find doctype
int docTypeIndex = -1;
int nchunk = 0;
int nindex = 0;
for (int index = getChunkIndex(fNodeLastChild, nchunk, nindex);
index != -1;
index = getChunkIndex(fNodePrevSib, nchunk, nindex)) {
nchunk = index >> CHUNK_SHIFT;
nindex = index & CHUNK_MASK;
if (getChunkIndex(fNodeType, nchunk, nindex) == Node.DOCUMENT_TYPE_NODE) {
docTypeIndex = index;
break;
}
}
// find element definition
if (docTypeIndex == -1) {
return -1;
}
nchunk = docTypeIndex >> CHUNK_SHIFT;
nindex = docTypeIndex & CHUNK_MASK;
for (int index = getChunkIndex(fNodeLastChild, nchunk, nindex);
index != -1;
index = getChunkIndex(fNodePrevSib, nchunk, nindex)) {
nchunk = index >> CHUNK_SHIFT;
nindex = index & CHUNK_MASK;
if (getChunkIndex(fNodeName, nchunk, nindex) == elementNameIndex) {
return index;
}
}
}
return -1;
} // lookupElementDefinition(int):int
/** Instantiates the requested node object. */
public DeferredNode getNodeObject(int nodeIndex) {
// is there anything to do?
if (nodeIndex == -1) {
return null;
}
// get node type
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int type = clearChunkIndex(fNodeType, chunk, index);
clearChunkIndex(fNodeParent, chunk, index);
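        // NOTE: reading these slots with clearChunkIndex() rather than
        // getChunkIndex() releases them; once every live slot in a chunk has
        // been consumed this way the whole chunk is dropped (see
        // clearChunkIndex()), so the parse-time tables shrink as the lazy
        // tree is expanded into real node objects.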
// create new node
DeferredNode node = null;
switch (type) {
//
// Standard DOM node types
//
case Node.ATTRIBUTE_NODE: {
if (fNamespacesEnabled) {
node = new DeferredAttrNSImpl(this, nodeIndex);
} else {
node = new DeferredAttrImpl(this, nodeIndex);
}
break;
}
case Node.CDATA_SECTION_NODE: {
node = new DeferredCDATASectionImpl(this, nodeIndex);
break;
}
case Node.COMMENT_NODE: {
node = new DeferredCommentImpl(this, nodeIndex);
break;
}
// NOTE: Document fragments can never be "fast".
//
// The parser will never ask to create a document
// fragment during the parse. Document fragments
// are used by the application *after* the parse.
//
// case Node.DOCUMENT_FRAGMENT_NODE: { break; }
case Node.DOCUMENT_NODE: {
// this node is never "fast"
node = this;
break;
}
case Node.DOCUMENT_TYPE_NODE: {
node = new DeferredDocumentTypeImpl(this, nodeIndex);
// save the doctype node
docType = (DocumentTypeImpl)node;
break;
}
case Node.ELEMENT_NODE: {
if (DEBUG_IDS) {
System.out.println("getNodeObject(ELEMENT_NODE): "+nodeIndex);
}
// create node
if (fNamespacesEnabled) {
node = new DeferredElementNSImpl(this, nodeIndex);
} else {
node = new DeferredElementImpl(this, nodeIndex);
}
// save the document element node
if (docElement == null) {
docElement = (ElementImpl)node;
}
// check to see if this element needs to be
// registered for its ID attributes
if (fIdElement != null) {
int idIndex = DeferredDocumentImpl.binarySearch(fIdElement, 0, fIdCount-1, nodeIndex);
while (idIndex != -1) {
if (DEBUG_IDS) {
System.out.println(" id index: "+idIndex);
System.out.println(" fIdName["+idIndex+
"]: "+fIdName[idIndex]);
}
// register ID
int nameIndex = fIdName[idIndex];
if (nameIndex != -1) {
String name = fStringPool.toString(nameIndex);
if (DEBUG_IDS) {
System.out.println(" name: "+name);
System.out.print("getNodeObject()#");
}
putIdentifier0(name, (Element)node);
fIdName[idIndex] = -1;
}
// continue if there are more IDs for
// this element
if (idIndex + 1 < fIdCount &&
fIdElement[idIndex + 1] == nodeIndex) {
idIndex++;
}
else {
idIndex = -1;
}
}
}
break;
}
case Node.ENTITY_NODE: {
node = new DeferredEntityImpl(this, nodeIndex);
break;
}
case Node.ENTITY_REFERENCE_NODE: {
node = new DeferredEntityReferenceImpl(this, nodeIndex);
break;
}
case Node.NOTATION_NODE: {
node = new DeferredNotationImpl(this, nodeIndex);
break;
}
case Node.PROCESSING_INSTRUCTION_NODE: {
node = new DeferredProcessingInstructionImpl(this, nodeIndex);
break;
}
case Node.TEXT_NODE: {
node = new DeferredTextImpl(this, nodeIndex);
break;
}
//
// non-standard DOM node types
//
case NodeImpl.ELEMENT_DEFINITION_NODE: {
node = new DeferredElementDefinitionImpl(this, nodeIndex);
break;
}
default: {
throw new IllegalArgumentException("type: "+type);
}
} // switch node type
// store and return
if (node != null) {
return node;
}
// error
throw new IllegalArgumentException();
    } // getNodeObject(int):DeferredNode
/** Returns the name of the given node. */
public String getNodeNameString(int nodeIndex) {
return getNodeNameString(nodeIndex, true);
} // getNodeNameString(int):String
/**
* Returns the name of the given node.
* @param free True to free the string index.
*/
public String getNodeNameString(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return null;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int nameIndex = free
? clearChunkIndex(fNodeName, chunk, index)
: getChunkIndex(fNodeName, chunk, index);
if (nameIndex == -1) {
return null;
}
return fStringPool.toString(nameIndex);
} // getNodeNameString(int,boolean):String
/** Returns the value of the given node. */
public String getNodeValueString(int nodeIndex) {
return getNodeValueString(nodeIndex, true);
} // getNodeValueString(int):String
/**
* Returns the value of the given node.
* @param free True to free the string index.
*/
public String getNodeValueString(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return null;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int valueIndex = free
? clearChunkIndex(fNodeValue, chunk, index)
: getChunkIndex(fNodeValue, chunk, index);
if (valueIndex == -1) {
return null;
}
return fStringPool.toString(valueIndex);
} // getNodeValueString(int,boolean):String
/** Returns the real int name of the given node. */
public int getNodeName(int nodeIndex) {
return getNodeName(nodeIndex, true);
}
/**
* Returns the real int name of the given node.
* @param free True to free the name index.
*/
public int getNodeName(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeName, chunk, index)
: getChunkIndex(fNodeName, chunk, index);
} // getNodeName(int,boolean):int
/**
* Returns the real int value of the given node.
* Used by AttrImpl to store specified value (1 == true).
*/
public int getNodeValue(int nodeIndex) {
return getNodeValue(nodeIndex, true);
}
/**
* Returns the real int value of the given node.
* @param free True to free the value index.
*/
public int getNodeValue(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeValue, chunk, index)
: getChunkIndex(fNodeValue, chunk, index);
} // getNodeValue(int,boolean):int
/** Returns the type of the given node. */
public short getNodeType(int nodeIndex) {
return getNodeType(nodeIndex, true);
}
/**
* Returns the type of the given node.
     * @param free True to free type index.
*/
public short getNodeType(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
if (free) {
return (short)clearChunkIndex(fNodeType, chunk, index);
}
return (short)getChunkIndex(fNodeType, chunk, index);
    } // getNodeType(int,boolean):short
// identifier maintenance
/** Registers an identifier name with a specified element node. */
public void putIdentifier(int nameIndex, int elementNodeIndex) {
if (DEBUG_IDS) {
System.out.println("putIdentifier("+nameIndex+", "+elementNodeIndex+')'+
" // "+
fStringPool.toString(nameIndex)+
", "+
fStringPool.toString(getChunkIndex(fNodeName, elementNodeIndex >> CHUNK_SHIFT, elementNodeIndex & CHUNK_MASK)));
}
// initialize arrays
if (fIdName == null) {
fIdName = new int[64];
fIdElement = new int[64];
}
// resize arrays
if (fIdCount == fIdName.length) {
int idName[] = new int[fIdCount * 2];
System.arraycopy(fIdName, 0, idName, 0, fIdCount);
fIdName = idName;
int idElement[] = new int[idName.length];
System.arraycopy(fIdElement, 0, idElement, 0, fIdCount);
fIdElement = idElement;
}
// store identifier
fIdName[fIdCount] = nameIndex;
fIdElement[fIdCount] = elementNodeIndex;
fIdCount++;
} // putIdentifier(int,int)
//
// DEBUG
//
/** Prints out the tables. */
public void print() {
if (DEBUG_PRINT_REF_COUNTS) {
System.out.print("num\t");
System.out.print("type\t");
System.out.print("name\t");
System.out.print("val\t");
System.out.print("par\t");
System.out.print("fch\t");
System.out.print("nsib");
System.out.println();
for (int i = 0; i < fNodeType.length; i++) {
if (fNodeType[i] != null) {
// separator
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.println();
// set count
System.out.print(i);
System.out.print('\t');
System.out.print(fNodeType[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeName[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeValue[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeParent[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeLastChild[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodePrevSib[i][CHUNK_SIZE]);
System.out.println();
// clear count
System.out.print(i);
System.out.print('\t');
System.out.print(fNodeType[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeName[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeValue[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeParent[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeLastChild[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodePrevSib[i][CHUNK_SIZE + 1]);
System.out.println();
}
}
}
if (DEBUG_PRINT_TABLES) {
// This assumes that the document is small
System.out.println("# start table");
for (int i = 0; i < fNodeCount; i++) {
int chunk = i >> CHUNK_SHIFT;
int index = i & CHUNK_MASK;
if (i % 10 == 0) {
System.out.print("num\t");
System.out.print("type\t");
System.out.print("name\t");
System.out.print("val\t");
System.out.print("par\t");
System.out.print("fch\t");
System.out.print("nsib");
System.out.println();
}
System.out.print(i);
System.out.print('\t');
System.out.print(getChunkIndex(fNodeType, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeName, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeValue, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeParent, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeLastChild, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodePrevSib, chunk, index));
/***
System.out.print(fNodeType[0][i]);
System.out.print('\t');
System.out.print(fNodeName[0][i]);
System.out.print('\t');
System.out.print(fNodeValue[0][i]);
System.out.print('\t');
System.out.print(fNodeParent[0][i]);
System.out.print('\t');
System.out.print(fNodeFirstChild[0][i]);
System.out.print('\t');
System.out.print(fNodeLastChild[0][i]);
System.out.print('\t');
System.out.print(fNodePrevSib[0][i]);
System.out.print('\t');
System.out.print(fNodeNextSib[0][i]);
/***/
System.out.println();
}
System.out.println("# end table");
}
} // print()
//
// DeferredNode methods
//
/** Returns the node index. */
public int getNodeIndex() {
return 0;
}
//
// Protected methods
//
/** access to string pool. */
protected StringPool getStringPool() {
return fStringPool;
}
/** Synchronizes the node's data. */
protected void synchronizeData() {
// no need to sync in the future
syncData(false);
// fluff up enough nodes to fill identifiers hash
if (fIdElement != null) {
// REVISIT: There has to be a more efficient way of
// doing this. But keep in mind that the
// tree can have been altered and re-ordered
// before all of the element nodes with ID
// attributes have been registered. For now
// this is reasonable and safe. -Ac
IntVector path = new IntVector();
for (int i = 0; i < fIdCount; i++) {
// ignore if it's already been registered
int elementNodeIndex = fIdElement[i];
int idNameIndex = fIdName[i];
if (idNameIndex == -1) {
continue;
}
// find path from this element to the root
path.removeAllElements();
int index = elementNodeIndex;
do {
path.addElement(index);
int pchunk = index >> CHUNK_SHIFT;
int pindex = index & CHUNK_MASK;
index = getChunkIndex(fNodeParent, pchunk, pindex);
} while (index != -1);
// Traverse path (backwards), fluffing the elements
// along the way. When this loop finishes, "place"
// will contain the reference to the element node
// we're interested in. -Ac
Node place = this;
for (int j = path.size() - 2; j >= 0; j--) {
index = path.elementAt(j);
Node child = place.getLastChild();
while (child != null) {
if (child instanceof DeferredNode) {
int nodeIndex = ((DeferredNode)child).getNodeIndex();
if (nodeIndex == index) {
place = child;
break;
}
}
child = child.getPreviousSibling();
}
}
// register the element
Element element = (Element)place;
String name = fStringPool.toString(idNameIndex);
putIdentifier0(name, element);
fIdName[i] = -1;
// see if there are more IDs on this element
while (fIdElement[i + 1] == elementNodeIndex) {
name = fStringPool.toString(fIdName[++i]);
putIdentifier0(name, element);
}
}
} // if identifiers
} // synchronizeData()
/**
* Synchronizes the node's children with the internal structure.
* Fluffing the children at once solves a lot of work to keep
* the two structures in sync. The problem gets worse when
* editing the tree -- this makes it a lot easier.
*/
protected void synchronizeChildren() {
if (syncData()) {
synchronizeData();
}
// no need to sync in the future
syncChildren(false);
getNodeType(0);
// create children and link them as siblings
ChildNode first = null;
ChildNode last = null;
for (int index = getLastChild(0);
index != -1;
index = getPrevSibling(index)) {
ChildNode node = (ChildNode)getNodeObject(index);
if (last == null) {
last = node;
}
else {
first.previousSibling = node;
}
node.ownerNode = this;
node.owned(true);
node.nextSibling = first;
first = node;
// save doctype and document type
int type = node.getNodeType();
if (type == Node.ELEMENT_NODE) {
docElement = (ElementImpl)node;
}
else if (type == Node.DOCUMENT_TYPE_NODE) {
docType = (DocumentTypeImpl)node;
}
}
if (first != null) {
firstChild = first;
first.firstChild(true);
lastChild(last);
}
} // synchronizeChildren()
// utility methods
/** Ensures that the internal tables are large enough. */
protected boolean ensureCapacity(int chunk, int index) {
// create buffers
if (fNodeType == null) {
fNodeType = new int[INITIAL_CHUNK_COUNT][];
fNodeName = new int[INITIAL_CHUNK_COUNT][];
fNodeValue = new int[INITIAL_CHUNK_COUNT][];
fNodeParent = new int[INITIAL_CHUNK_COUNT][];
fNodeLastChild = new int[INITIAL_CHUNK_COUNT][];
fNodePrevSib = new int[INITIAL_CHUNK_COUNT][];
}
// return true if table is already big enough
try {
return fNodeType[chunk][index] != 0;
}
// resize the tables
catch (ArrayIndexOutOfBoundsException ex) {
int newsize = chunk * 2;
int[][] newArray = new int[newsize][];
System.arraycopy(fNodeType, 0, newArray, 0, chunk);
fNodeType = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeName, 0, newArray, 0, chunk);
fNodeName = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeValue, 0, newArray, 0, chunk);
fNodeValue = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeParent, 0, newArray, 0, chunk);
fNodeParent = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeLastChild, 0, newArray, 0, chunk);
fNodeLastChild = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodePrevSib, 0, newArray, 0, chunk);
fNodePrevSib = newArray;
}
catch (NullPointerException ex) {
// ignore
}
// create chunks
createChunk(fNodeType, chunk);
createChunk(fNodeName, chunk);
createChunk(fNodeValue, chunk);
createChunk(fNodeParent, chunk);
createChunk(fNodeLastChild, chunk);
createChunk(fNodePrevSib, chunk);
// success
return true;
} // ensureCapacity(int,int):boolean
/** Creates a node of the specified type. */
protected int createNode(short nodeType) {
// ensure tables are large enough
int chunk = fNodeCount >> CHUNK_SHIFT;
int index = fNodeCount & CHUNK_MASK;
ensureCapacity(chunk, index);
// initialize node
setChunkIndex(fNodeType, nodeType, chunk, index);
// return node index number
return fNodeCount++;
} // createNode(short):int
/**
* Performs a binary search for a target value in an array of
* values. The array of values must be in ascending sorted order
* before calling this method and all array values must be
* non-negative.
*
* @param values The array of values to search.
* @param start The starting offset of the search.
* @param end The ending offset of the search.
* @param target The target value.
*
* @return This function will return the <i>first</i> occurrence
* of the target value, or -1 if the target value cannot
* be found.
*/
protected static int binarySearch(final int values[],
int start, int end, int target) {
if (DEBUG_IDS) {
System.out.println("binarySearch(), target: "+target);
}
// look for target value
while (start <= end) {
// is this the one we're looking for?
int middle = (start + end) / 2;
int value = values[middle];
if (DEBUG_IDS) {
System.out.print(" value: "+value+", target: "+target+" // ");
print(values, start, end, middle, target);
}
if (value == target) {
while (middle > 0 && values[middle - 1] == target) {
middle--;
}
if (DEBUG_IDS) {
System.out.println("FOUND AT "+middle);
}
return middle;
}
// is this point higher or lower?
if (value > target) {
end = middle - 1;
}
else {
start = middle + 1;
}
} // while
// not found
if (DEBUG_IDS) {
System.out.println("NOT FOUND!");
}
return -1;
} // binarySearch(int[],int,int,int):int
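/*
 * A short example of the first-occurrence behaviour, using a made-up sorted
 * array of element indexes:
 *
 *   int values[] = { 3, 7, 7, 7, 12 };
 *   binarySearch(values, 0, 4, 7);    // returns 1, the first 7, not 2
 *   binarySearch(values, 0, 4, 5);    // returns -1, value not present
 *
 * This is why getNodeObject() can start at the returned index and simply walk
 * forward while fIdElement[idIndex] keeps matching the node index.
 */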
//
// Private methods
//
/** Creates the specified chunk in the given array of chunks. */
private final void createChunk(int data[][], int chunk) {
data[chunk] = new int[CHUNK_SIZE + 2];
for (int i = 0; i < CHUNK_SIZE; i++) {
data[chunk][i] = -1;
}
}
/**
* Sets the specified value in the given array of data at the chunk and index.
*
* @return Returns the old value.
*/
private final int setChunkIndex(int data[][], int value, int chunk, int index) {
if (value == -1) {
return clearChunkIndex(data, chunk, index);
}
int ovalue = data[chunk][index];
if (ovalue == -1) {
data[chunk][CHUNK_SIZE]++;
}
data[chunk][index] = value;
return ovalue;
}
/**
* Returns the specified value in the given data at the chunk and index.
*/
private final int getChunkIndex(int data[][], int chunk, int index) {
return data[chunk] != null ? data[chunk][index] : -1;
}
/**
* Clears the specified value in the given data at the chunk and index.
* Note that this method will clear the given chunk if the reference
* count becomes zero.
*
* @return Returns the old value.
*/
private final int clearChunkIndex(int data[][], int chunk, int index) {
int value = data[chunk] != null ? data[chunk][index] : -1;
if (value != -1) {
data[chunk][CHUNK_SIZE + 1]++;
data[chunk][index] = -1;
if (data[chunk][CHUNK_SIZE] == data[chunk][CHUNK_SIZE + 1]) {
data[chunk] = null;
}
}
return value;
}
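/*
 * Sketch of the reference counting implemented by the two extra slots in each
 * chunk (slot CHUNK_SIZE counts values that were set, slot CHUNK_SIZE + 1
 * counts values that were cleared). Here n1 and n2 stand for arbitrary string
 * pool indexes and c for a chunk that ensureCapacity() has already created:
 *
 *   setChunkIndex(fNodeName, n1, c, 0);   // fNodeName[c][CHUNK_SIZE]     == 1
 *   setChunkIndex(fNodeName, n2, c, 1);   // fNodeName[c][CHUNK_SIZE]     == 2
 *   clearChunkIndex(fNodeName, c, 0);     // fNodeName[c][CHUNK_SIZE + 1] == 1
 *   clearChunkIndex(fNodeName, c, 1);     // counts are now equal, so
 *                                         // fNodeName[c] is released (set to null)
 *
 * Once a chunk has been fully fluffed and cleared it no longer occupies memory.
 */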
/**
* This version of putIdentifier is needed to avoid fluffing
* all of the paths to ID attributes when a node object is
* created that contains an ID attribute.
*/
private final void putIdentifier0(String idName, Element element) {
if (DEBUG_IDS) {
System.out.println("putIdentifier0("+
idName+", "+
element+')');
}
// create hashtable
if (identifiers == null) {
identifiers = new java.util.Hashtable();
}
// save ID and its associated element
identifiers.put(idName, element);
} // putIdentifier0(String,Element)
/** Prints the ID array. */
private static void print(int values[], int start, int end,
int middle, int target) {
if (DEBUG_IDS) {
System.out.print(start);
System.out.print(" [");
for (int i = start; i < end; i++) {
if (middle == i) {
System.out.print("!");
}
System.out.print(values[i]);
if (values[i] == target) {
System.out.print("*");
}
if (i < end - 1) {
System.out.print(" ");
}
}
System.out.println("] "+end);
}
} // print(int[],int,int,int,int)
//
// Classes
//
/**
* A simple integer vector.
*/
static class IntVector {
//
// Data
//
/** Data. */
private int data[];
/** Size. */
private int size;
//
// Public methods
//
/** Returns the length of this vector. */
public int size() {
return size;
}
/** Returns the element at the specified index. */
public int elementAt(int index) {
return data[index];
}
/** Appends an element to the end of the vector. */
public void addElement(int element) {
ensureCapacity(size + 1);
data[size++] = element;
}
/** Clears the vector. */
public void removeAllElements() {
size = 0;
}
//
// Private methods
//
/** Makes sure that there is enough storage. */
private void ensureCapacity(int newsize) {
if (data == null) {
data = new int[newsize + 15];
}
else if (newsize > data.length) {
int newdata[] = new int[newsize + 15];
System.arraycopy(data, 0, newdata, 0, data.length);
data = newdata;
}
} // ensureCapacity(int)
} // class IntVector
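/*
 * IntVector is used by synchronizeData() to record the chain of ancestor
 * indexes from an ID element up to the document; a minimal usage sketch:
 *
 *   IntVector path = new IntVector();
 *   path.addElement(42);
 *   path.addElement(7);
 *   int n = path.size();          // 2
 *   int top = path.elementAt(0);  // 42
 *   path.removeAllElements();     // size() == 0, storage is kept for reuse
 */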
} // class DeferredDocumentImpl
| src/org/apache/xerces/dom/DeferredDocumentImpl.java | /*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact [email protected].
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.xerces.dom;
import java.util.Vector;
import org.apache.xerces.framework.XMLAttrList;
import org.apache.xerces.utils.StringPool;
import org.w3c.dom.*;
/**
* The Document interface represents the entire HTML or XML document.
* Conceptually, it is the root of the document tree, and provides the
* primary access to the document's data.
* <P>
* Since elements, text nodes, comments, processing instructions,
* etc. cannot exist outside the context of a Document, the Document
* interface also contains the factory methods needed to create these
* objects. The Node objects created have a ownerDocument attribute
* which associates them with the Document within whose context they
* were created.
*
* @version
* @since PR-DOM-Level-1-19980818.
*/
public class DeferredDocumentImpl
extends DocumentImpl
implements DeferredNode {
//
// Constants
//
/** Serialization version. */
static final long serialVersionUID = 5186323580749626857L;
// debugging
/** To include code for printing the ref count tables. */
private static final boolean DEBUG_PRINT_REF_COUNTS = false;
/** To include code for printing the internal tables. */
private static final boolean DEBUG_PRINT_TABLES = false;
/** To debug identifiers set to true and recompile. */
private static final boolean DEBUG_IDS = false;
// protected
/** Chunk shift. */
protected static final int CHUNK_SHIFT = 11; // 2^11 = 2k
/** Chunk size. */
protected static final int CHUNK_SIZE = (1 << CHUNK_SHIFT);
/** Chunk mask. */
protected static final int CHUNK_MASK = CHUNK_SIZE - 1;
/** Initial chunk count. */
protected static final int INITIAL_CHUNK_COUNT = (1 << (16 - CHUNK_SHIFT)); // 2^16 = 64k
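/*
 * Quick arithmetic for the constants above: CHUNK_SHIFT == 11 gives
 * CHUNK_SIZE == 2048 entries per chunk and CHUNK_MASK == 2047, while
 * INITIAL_CHUNK_COUNT == 1 << (16 - 11) == 32, so
 *
 *   INITIAL_CHUNK_COUNT * CHUNK_SIZE == 32 * 2048 == 65536
 *
 * nodes can be addressed before ensureCapacity() has to double the outer
 * chunk arrays themselves.
 */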
//
// Data
//
// lazy-eval information
/** Node count. */
protected transient int fNodeCount = 0;
/** Node types. */
protected transient int fNodeType[][];
/** Node names. */
protected transient int fNodeName[][];
/** Node values. */
protected transient int fNodeValue[][];
/** Node parents. */
protected transient int fNodeParent[][];
/** Node last children. */
protected transient int fNodeLastChild[][];
/** Node prev siblings. */
protected transient int fNodePrevSib[][];
/** Identifier count. */
protected transient int fIdCount;
/** Identifier name indexes. */
protected transient int fIdName[];
/** Identifier element indexes. */
protected transient int fIdElement[];
/** String pool cache. */
protected transient StringPool fStringPool;
/** DOM2: For namespace support in the deferred case.
*/
// Implementation Note: The deferred element and attribute must know how to
// interpret the int representing the qname.
protected boolean fNamespacesEnabled = false;
//
// Constructors
//
/**
* NON-DOM: Actually creating a Document is outside the DOM's spec,
* since it has to operate in terms of a particular implementation.
*/
public DeferredDocumentImpl(StringPool stringPool) {
this(stringPool, false);
} // <init>(ParserState)
/**
* NON-DOM: Actually creating a Document is outside the DOM's spec,
* since it has to operate in terms of a particular implementation.
*/
public DeferredDocumentImpl(StringPool stringPool, boolean namespacesEnabled) {
this(stringPool, namespacesEnabled, false);
} // <init>(ParserState,boolean)
/** Experimental constructor. */
public DeferredDocumentImpl(StringPool stringPool,
boolean namespaces, boolean grammarAccess) {
super(grammarAccess);
fStringPool = stringPool;
syncData(true);
syncChildren(true);
fNamespacesEnabled = namespaces;
} // <init>(StringPool,boolean,boolean)
//
// Public methods
//
/** Returns the cached parser.getNamespaces() value.*/
boolean getNamespacesEnabled() {
return fNamespacesEnabled;
}
// internal factory methods
/** Creates a document node in the table. */
public int createDocument() {
int nodeIndex = createNode(Node.DOCUMENT_NODE);
return nodeIndex;
}
/** Creates a doctype. */
public int createDocumentType(int rootElementNameIndex, int publicId, int systemId) {
// create node
int nodeIndex = createNode(Node.DOCUMENT_TYPE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// added for DOM2: createDoctype factory method includes
// name, publicID, systemID
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id
setChunkIndex(fNodeName, rootElementNameIndex, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
// return node index
return nodeIndex;
} // createDocumentType(int,int,int):int
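/*
 * A sketch of the "extra data node" layout used above (rootNameIdx, publicIdIdx
 * and systemIdIdx are made-up string pool indexes):
 *
 *   int dt = createDocumentType(rootNameIdx, publicIdIdx, systemIdIdx);
 *   // doctype row:    fNodeName  == rootNameIdx
 *   //                 fNodeValue == index of a second, typeless "extra data" row
 *   // extra data row: fNodeName  == publicIdIdx
 *   //                 fNodeValue == systemIdIdx
 *   //                 fNodeLastChild == internal subset, once setInternalSubset() runs
 *
 * createNotation() and createEntity() store their public and system ids with a
 * similar extra data row.
 */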
public void setInternalSubset(int doctypeIndex, int subsetIndex) {
int chunk = doctypeIndex >> CHUNK_SHIFT;
int index = doctypeIndex & CHUNK_MASK;
int extraDataIndex = fNodeValue[chunk][index];
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
fNodeLastChild[echunk][eindex] = subsetIndex;
}
/** Creates a notation in the table. */
public int createNotation(int notationName, int publicId, int systemId) throws Exception {
// create node
int nodeIndex = createNode(Node.NOTATION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id, and notation name
setChunkIndex(fNodeName, notationName, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
// return node index
return nodeIndex;
} // createNotation(int,int,int):int
/** Creates an entity in the table. */
public int createEntity(int entityName, int publicId, int systemId, int notationName) throws Exception {
// create node
int nodeIndex = createNode(Node.ENTITY_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
// create extra data node
int extraDataIndex = createNode((short)0); // node type unimportant
int echunk = extraDataIndex >> CHUNK_SHIFT;
int eindex = extraDataIndex & CHUNK_MASK;
// save name, public id, system id, and notation name
setChunkIndex(fNodeName, entityName, chunk, index);
setChunkIndex(fNodeValue, extraDataIndex, chunk, index);
setChunkIndex(fNodeName, publicId, echunk, eindex);
setChunkIndex(fNodeValue, systemId, echunk, eindex);
setChunkIndex(fNodeLastChild, notationName, echunk, eindex);
// return node index
return nodeIndex;
} // createEntity(int,int,int,int):int
/** Creates an entity reference node in the table. */
public int createEntityReference(int nameIndex) throws Exception {
// create node
int nodeIndex = createNode(Node.ENTITY_REFERENCE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, nameIndex, chunk, index);
// return node index
return nodeIndex;
} // createEntityReference(int):int
/** Creates an element node in the table. */
public int createElement(int elementNameIndex, XMLAttrList attrList, int attrListIndex) {
// create node
int elementNodeIndex = createNode(Node.ELEMENT_NODE);
int elementChunk = elementNodeIndex >> CHUNK_SHIFT;
int elementIndex = elementNodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, elementNameIndex, elementChunk, elementIndex);
// create attributes
if (attrListIndex != -1) {
int first = attrList.getFirstAttr(attrListIndex);
int lastAttrNodeIndex = -1;
int lastAttrChunk = -1;
int lastAttrIndex = -1;
for (int index = first;
index != -1;
index = attrList.getNextAttr(index)) {
// create attribute
int attrNodeIndex = createAttribute(attrList.getAttrName(index),
attrList.getAttValue(index),
attrList.isSpecified(index));
int attrChunk = attrNodeIndex >> CHUNK_SHIFT;
int attrIndex = attrNodeIndex & CHUNK_MASK;
setChunkIndex(fNodeParent, elementNodeIndex, attrChunk, attrIndex);
// add links
if (index == first) {
setChunkIndex(fNodeValue, attrNodeIndex, elementChunk, elementIndex);
}
else {
setChunkIndex(fNodePrevSib, attrNodeIndex, lastAttrChunk, lastAttrIndex);
}
// save last chunk and index
lastAttrNodeIndex = attrNodeIndex;
lastAttrChunk = attrChunk;
lastAttrIndex = attrIndex;
}
}
// return node index
return elementNodeIndex;
} // createElement(int,XMLAttrList,int):int
/** Creates an attribute in the table. */
public int createAttribute(int attrNameIndex, int attrValueIndex,
boolean specified) {
// create node
int nodeIndex = createNode(NodeImpl.ATTRIBUTE_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, attrNameIndex, chunk, index);
setChunkIndex(fNodeValue, specified ? 1 : 0, chunk, index);
// append value as text node
int textNodeIndex = createTextNode(attrValueIndex, false);
appendChild(nodeIndex, textNodeIndex);
// return node index
return nodeIndex;
} // createAttribute(int,int,boolean):int
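/*
 * What createAttribute() leaves behind, with made-up string pool indexes
 * nameIdx and valueIdx:
 *
 *   int a = createAttribute(nameIdx, valueIdx, true);
 *   // attribute row: fNodeName == nameIdx, fNodeValue == 1 (the "specified" flag)
 *   //                fNodeLastChild == a TEXT_NODE row whose fNodeValue == valueIdx
 *
 * The attribute's string value therefore lives on its deferred text child, not
 * on the attribute row itself.
 */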
/** Creates an element definition in the table. */
public int createElementDefinition(int elementNameIndex) {
// create node
int nodeIndex = createNode(NodeImpl.ELEMENT_DEFINITION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, elementNameIndex, chunk, index);
// return node index
return nodeIndex;
} // createElementDefinition(int):int
/** Creates a text node in the table. */
public int createTextNode(int dataIndex, boolean ignorableWhitespace) {
// create node
int nodeIndex = createNode(Node.TEXT_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// use last child to store ignorableWhitespace info
setChunkIndex(fNodeLastChild,
ignorableWhitespace ? 1 : 0, chunk, index);
// return node index
return nodeIndex;
} // createTextNode(int,boolean):int
/** Creates a CDATA section node in the table. */
public int createCDATASection(int dataIndex, boolean ignorableWhitespace) {
// create node
int nodeIndex = createNode(Node.CDATA_SECTION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// use last child to store ignorableWhitespace info
setChunkIndex(fNodeLastChild,
ignorableWhitespace ? 1 : 0, chunk, index);
// return node index
return nodeIndex;
} // createCDATASection(int,boolean):int
/** Creates a processing instruction node in the table. */
public int createProcessingInstruction(int targetIndex, int dataIndex) {
// create node
int nodeIndex = createNode(Node.PROCESSING_INSTRUCTION_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeName, targetIndex, chunk, index);
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// return node index
return nodeIndex;
} // createProcessingInstruction(int,int):int
/** Creates a comment node in the table. */
public int createComment(int dataIndex) {
// create node
int nodeIndex = createNode(Node.COMMENT_NODE);
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
setChunkIndex(fNodeValue, dataIndex, chunk, index);
// return node index
return nodeIndex;
} // createComment(int):int
/** Appends a child to the specified parent in the table. */
public void appendChild(int parentIndex, int childIndex) {
// append parent index
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int cchunk = childIndex >> CHUNK_SHIFT;
int cindex = childIndex & CHUNK_MASK;
setChunkIndex(fNodeParent, parentIndex, cchunk, cindex);
// set previous sibling of new child
int olast = getChunkIndex(fNodeLastChild, pchunk, pindex);
setChunkIndex(fNodePrevSib, olast, cchunk, cindex);
// update parent's last child
setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
} // appendChild(int,int)
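/*
 * The child list is stored "backwards": only the parent's last child and each
 * child's previous sibling are kept, so appending is O(1). For a parent p and
 * two children a and b appended in that order (indexes are illustrative):
 *
 *   appendChild(p, a);   // lastChild(p) == a, prevSib(a) == -1
 *   appendChild(p, b);   // lastChild(p) == b, prevSib(b) == a
 *
 * First-child and next-sibling views are only reconstructed later, when the
 * nodes are fluffed (see synchronizeChildren() and getNodeObject()).
 */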
/** Adds an attribute node to the specified element. */
public int setAttributeNode(int elemIndex, int attrIndex) {
int echunk = elemIndex >> CHUNK_SHIFT;
int eindex = elemIndex & CHUNK_MASK;
int achunk = attrIndex >> CHUNK_SHIFT;
int aindex = attrIndex & CHUNK_MASK;
// see if this attribute is already here
String attrName =
fStringPool.toString(getChunkIndex(fNodeName, achunk, aindex));
int oldAttrIndex = getChunkIndex(fNodeValue, echunk, eindex);
int nextIndex = -1;
int oachunk = -1;
int oaindex = -1;
while (oldAttrIndex != -1) {
oachunk = oldAttrIndex >> CHUNK_SHIFT;
oaindex = oldAttrIndex & CHUNK_MASK;
String oldAttrName =
fStringPool.toString(getChunkIndex(fNodeName, oachunk, oaindex));
if (oldAttrName.equals(attrName)) {
break;
}
nextIndex = oldAttrIndex;
oldAttrIndex = getChunkIndex(fNodePrevSib, oachunk, oaindex);
}
// remove old attribute
if (oldAttrIndex != -1) {
// patch links
int prevIndex = getChunkIndex(fNodePrevSib, oachunk, oaindex);
if (nextIndex == -1) {
setChunkIndex(fNodeValue, prevIndex, echunk, eindex);
}
else {
int pchunk = nextIndex >> CHUNK_SHIFT;
int pindex = nextIndex & CHUNK_MASK;
setChunkIndex(fNodePrevSib, prevIndex, pchunk, pindex);
}
// remove connections to siblings
clearChunkIndex(fNodeType, oachunk, oaindex);
clearChunkIndex(fNodeName, oachunk, oaindex);
clearChunkIndex(fNodeValue, oachunk, oaindex);
clearChunkIndex(fNodeParent, oachunk, oaindex);
clearChunkIndex(fNodePrevSib, oachunk, oaindex);
int attrTextIndex =
clearChunkIndex(fNodeLastChild, oachunk, oaindex);
int atchunk = attrTextIndex >> CHUNK_SHIFT;
int atindex = attrTextIndex & CHUNK_MASK;
clearChunkIndex(fNodeType, atchunk, atindex);
clearChunkIndex(fNodeValue, atchunk, atindex);
clearChunkIndex(fNodeParent, atchunk, atindex);
clearChunkIndex(fNodeLastChild, atchunk, atindex);
}
// add new attribute
int prevIndex = getChunkIndex(fNodeValue, echunk, eindex);
setChunkIndex(fNodeValue, attrIndex, echunk, eindex);
setChunkIndex(fNodePrevSib, prevIndex, achunk, aindex);
// return
return oldAttrIndex;
} // setAttributeNode(int,int):int
/** Inserts a child before the specified node in the table. */
public int insertBefore(int parentIndex, int newChildIndex, int refChildIndex) {
if (refChildIndex == -1) {
appendChild(parentIndex, newChildIndex);
return newChildIndex;
}
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int nchunk = newChildIndex >> CHUNK_SHIFT;
int nindex = newChildIndex & CHUNK_MASK;
int rchunk = refChildIndex >> CHUNK_SHIFT;
int rindex = refChildIndex & CHUNK_MASK;
// 1) if ref.prev != null then
// begin
// new.prev := ref.prev;
// end;
int nextIndex = -1;
int index = -1;
for (index = getChunkIndex(fNodeLastChild, pchunk, pindex);
index != -1;
index = getChunkIndex(fNodePrevSib, index >> CHUNK_SHIFT, index & CHUNK_MASK)) {
if (nextIndex == refChildIndex) {
break;
}
nextIndex = index;
}
if (index != -1) {
int ochunk = index >> CHUNK_SHIFT;
int oindex = index & CHUNK_MASK;
setChunkIndex(fNodePrevSib, newChildIndex, ochunk, oindex);
}
// 2) ref.prev := new;
setChunkIndex(fNodePrevSib, refChildIndex, nchunk, nindex);
return newChildIndex;
} // insertBefore(int,int,int):int
/** Sets the last child of the parentIndex to childIndex. */
public void setAsLastChild(int parentIndex, int childIndex) {
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int chunk = childIndex >> CHUNK_SHIFT;
int index = childIndex & CHUNK_MASK;
setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
int prev = childIndex;
while (prev != -1) {
childIndex = prev;
prev = getChunkIndex(fNodePrevSib, chunk, index);
chunk = prev >> CHUNK_SHIFT;
index = prev & CHUNK_MASK;
}
} // setAsLastChild(int,int)
/** Sets the first child of the parentIndex to childIndex. */
public void setAsFirstChild(int parentIndex, int childIndex) {
/*
int pchunk = parentIndex >> CHUNK_SHIFT;
int pindex = parentIndex & CHUNK_MASK;
int chunk = childIndex >> CHUNK_SHIFT;
int index = childIndex & CHUNK_MASK;
setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
int prev = childIndex;
while (prev != -1) {
childIndex = prev;
prev = getChunkIndex(fNodePrevSib, chunk, index);
chunk = prev >> CHUNK_SHIFT;
index = prev & CHUNK_MASK;
}
*/
} // setAsLastChild(int,int)
// methods used when objects are "fluffed-up"
/**
* Returns the parent node of the given node.
* <em>Calling this method does not free the parent index.</em>
*/
public int getParentNode(int nodeIndex) {
return getParentNode(nodeIndex, false);
}
/**
* Returns the parent node of the given node.
* @param free True to free parent node.
*/
public int getParentNode(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeParent, chunk, index)
: getChunkIndex(fNodeParent, chunk, index);
} // getParentNode(int):int
/** Returns the last child of the given node. */
public int getLastChild(int nodeIndex) {
return getLastChild(nodeIndex, true);
}
/**
* Returns the last child of the given node.
* @param free True to free child index.
*/
public int getLastChild(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeLastChild, chunk, index)
: getChunkIndex(fNodeLastChild, chunk, index);
} // getLastChild(int,boolean):int
/**
* Returns the prev sibling of the given node.
* This is post-normalization of Text Nodes.
*/
public int getPrevSibling(int nodeIndex) {
return getPrevSibling(nodeIndex, true);
}
/**
* Returns the prev sibling of the given node.
* @param free True to free sibling index.
*/
public int getPrevSibling(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
nodeIndex = free ? clearChunkIndex(fNodePrevSib, chunk, index)
: getChunkIndex(fNodePrevSib, chunk, index);
while (nodeIndex != -1 && getChunkIndex(fNodeType, chunk, index) == Node.TEXT_NODE) {
nodeIndex = getChunkIndex(fNodePrevSib, chunk, index);
chunk = nodeIndex >> CHUNK_SHIFT;
index = nodeIndex & CHUNK_MASK;
}
return nodeIndex;
} // getPrevSibling(int,boolean):int
/**
* Returns the <i>real</i> prev sibling of the given node,
* directly from the data structures. Used by TextImpl#getNodeValue()
* to normalize values.
*/
public int getRealPrevSibling(int nodeIndex) {
return getRealPrevSibling(nodeIndex, true);
}
/**
* Returns the <i>real</i> prev sibling of the given node.
* @param free True to free sibling index.
*/
public int getRealPrevSibling(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodePrevSib, chunk, index)
: getChunkIndex(fNodePrevSib, chunk, index);
} // getReadPrevSibling(int,boolean):int
/**
* Returns the first child of the given node at an EXTREME PRICE!!
* @param free True to free child index.
*/
public int getFirstChild(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int index = getLastChild(nodeIndex, false);
while (index != -1) {
index = getPrevSibling(index, false);
}
return index;
} // getLastChild(int,boolean):int
/**
* Returns the next sibling of the given node at an EXTREME PRICE!!
* @param free True to free child index.
*/
public int getNextSibling(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int pindex = getParentNode(nodeIndex, false);
int index = getLastChild(pindex, false);
int next = -1;
while (index != nodeIndex && index != -1) {
next = index;
index = getPrevSibling(next, false);
}
return next;
} // getNextSibling(int,boolean):int
/**
* Returns the index of the element definition in the table
* with the specified name index, or -1 if no such definition
* exists.
*/
public int lookupElementDefinition(int elementNameIndex) {
if (fNodeCount > 1) {
// find doctype
int docTypeIndex = -1;
int nchunk = 0;
int nindex = 0;
for (int index = getChunkIndex(fNodeLastChild, nchunk, nindex);
index != -1;
index = getChunkIndex(fNodePrevSib, nchunk, nindex)) {
nchunk = index >> CHUNK_SHIFT;
nindex = index & CHUNK_MASK;
if (getChunkIndex(fNodeType, nchunk, nindex) == Node.DOCUMENT_TYPE_NODE) {
docTypeIndex = index;
break;
}
}
// find element definition
if (docTypeIndex == -1) {
return -1;
}
nchunk = docTypeIndex >> CHUNK_SHIFT;
nindex = docTypeIndex & CHUNK_MASK;
for (int index = getChunkIndex(fNodeLastChild, nchunk, nindex);
index != -1;
index = getChunkIndex(fNodePrevSib, nchunk, nindex)) {
nchunk = index >> CHUNK_SHIFT;
nindex = index & CHUNK_MASK;
if (getChunkIndex(fNodeName, nchunk, nindex) == elementNameIndex) {
return index;
}
}
}
return -1;
} // lookupElementDefinition(int):int
/** Instantiates the requested node object. */
public DeferredNode getNodeObject(int nodeIndex) {
// is there anything to do?
if (nodeIndex == -1) {
return null;
}
// get node type
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int type = clearChunkIndex(fNodeType, chunk, index);
clearChunkIndex(fNodeParent, chunk, index);
// create new node
DeferredNode node = null;
switch (type) {
//
// Standard DOM node types
//
case Node.ATTRIBUTE_NODE: {
if (fNamespacesEnabled) {
node = new DeferredAttrNSImpl(this, nodeIndex);
} else {
node = new DeferredAttrImpl(this, nodeIndex);
}
break;
}
case Node.CDATA_SECTION_NODE: {
node = new DeferredCDATASectionImpl(this, nodeIndex);
break;
}
case Node.COMMENT_NODE: {
node = new DeferredCommentImpl(this, nodeIndex);
break;
}
// NOTE: Document fragments can never be "fast".
//
// The parser will never ask to create a document
// fragment during the parse. Document fragments
// are used by the application *after* the parse.
//
// case Node.DOCUMENT_FRAGMENT_NODE: { break; }
case Node.DOCUMENT_NODE: {
// this node is never "fast"
node = this;
break;
}
case Node.DOCUMENT_TYPE_NODE: {
node = new DeferredDocumentTypeImpl(this, nodeIndex);
// save the doctype node
docType = (DocumentTypeImpl)node;
break;
}
case Node.ELEMENT_NODE: {
if (DEBUG_IDS) {
System.out.println("getNodeObject(ELEMENT_NODE): "+nodeIndex);
}
// create node
if (fNamespacesEnabled) {
node = new DeferredElementNSImpl(this, nodeIndex);
} else {
node = new DeferredElementImpl(this, nodeIndex);
}
// save the document element node
if (docElement == null) {
docElement = (ElementImpl)node;
}
// check to see if this element needs to be
// registered for its ID attributes
if (fIdElement != null) {
int idIndex = DeferredDocumentImpl.binarySearch(fIdElement, 0, fIdCount-1, nodeIndex);
while (idIndex != -1) {
if (DEBUG_IDS) {
System.out.println(" id index: "+idIndex);
System.out.println(" fIdName["+idIndex+
"]: "+fIdName[idIndex]);
}
// register ID
int nameIndex = fIdName[idIndex];
if (nameIndex != -1) {
String name = fStringPool.toString(nameIndex);
if (DEBUG_IDS) {
System.out.println(" name: "+name);
System.out.print("getNodeObject()#");
}
putIdentifier0(name, (Element)node);
fIdName[idIndex] = -1;
}
// continue if there are more IDs for
// this element
if (idIndex + 1 < fIdCount &&
fIdElement[idIndex + 1] == nodeIndex) {
idIndex++;
}
else {
idIndex = -1;
}
}
}
break;
}
case Node.ENTITY_NODE: {
node = new DeferredEntityImpl(this, nodeIndex);
break;
}
case Node.ENTITY_REFERENCE_NODE: {
node = new DeferredEntityReferenceImpl(this, nodeIndex);
break;
}
case Node.NOTATION_NODE: {
node = new DeferredNotationImpl(this, nodeIndex);
break;
}
case Node.PROCESSING_INSTRUCTION_NODE: {
node = new DeferredProcessingInstructionImpl(this, nodeIndex);
break;
}
case Node.TEXT_NODE: {
node = new DeferredTextImpl(this, nodeIndex);
break;
}
//
// non-standard DOM node types
//
case NodeImpl.ELEMENT_DEFINITION_NODE: {
node = new DeferredElementDefinitionImpl(this, nodeIndex);
break;
}
default: {
throw new IllegalArgumentException("type: "+type);
}
} // switch node type
// store and return
if (node != null) {
return node;
}
// error
throw new IllegalArgumentException();
} // createNodeObject(int):Node
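/*
 * getNodeObject() is the point where a deferred node becomes a real object; a
 * rough sketch of a call (the node index is illustrative):
 *
 *   DeferredNode n = getNodeObject(12);
 *   // the type and parent entries for index 12 are cleared (reference counted),
 *   // and an instance such as DeferredElementImpl or DeferredTextImpl is
 *   // returned, which defers the rest of its data until it is accessed.
 */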
/** Returns the name of the given node. */
public String getNodeNameString(int nodeIndex) {
return getNodeNameString(nodeIndex, true);
} // getNodeNameString(int):String
/**
* Returns the name of the given node.
* @param free True to free the string index.
*/
public String getNodeNameString(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return null;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int nameIndex = free
? clearChunkIndex(fNodeName, chunk, index)
: getChunkIndex(fNodeName, chunk, index);
if (nameIndex == -1) {
return null;
}
return fStringPool.toString(nameIndex);
} // getNodeNameString(int,boolean):String
/** Returns the value of the given node. */
public String getNodeValueString(int nodeIndex) {
return getNodeValueString(nodeIndex, true);
} // getNodeValueString(int):String
/**
* Returns the value of the given node.
* @param free True to free the string index.
*/
public String getNodeValueString(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return null;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
int valueIndex = free
? clearChunkIndex(fNodeValue, chunk, index)
: getChunkIndex(fNodeValue, chunk, index);
if (valueIndex == -1) {
return null;
}
return fStringPool.toString(valueIndex);
} // getNodeValueString(int,boolean):String
/** Returns the real int name of the given node. */
public int getNodeName(int nodeIndex) {
return getNodeName(nodeIndex, true);
}
/**
* Returns the real int name of the given node.
* @param free True to free the name index.
*/
public int getNodeName(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeName, chunk, index)
: getChunkIndex(fNodeName, chunk, index);
} // getNodeName(int,boolean):int
/**
* Returns the real int value of the given node.
* Used by AttrImpl to store specified value (1 == true).
*/
public int getNodeValue(int nodeIndex) {
return getNodeValue(nodeIndex, true);
}
/**
* Returns the real int value of the given node.
* @param free True to free the value index.
*/
public int getNodeValue(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
return free ? clearChunkIndex(fNodeValue, chunk, index)
: getChunkIndex(fNodeValue, chunk, index);
} // getNodeValue(int,boolean):int
/** Returns the type of the given node. */
public short getNodeType(int nodeIndex) {
return getNodeType(nodeIndex, true);
}
/**
* Returns the type of the given node.
* @param free True to free the type index.
*/
public short getNodeType(int nodeIndex, boolean free) {
if (nodeIndex == -1) {
return -1;
}
int chunk = nodeIndex >> CHUNK_SHIFT;
int index = nodeIndex & CHUNK_MASK;
if (free) {
return (short)clearChunkIndex(fNodeType, chunk, index);
}
return (short)getChunkIndex(fNodeType, chunk, index);
} // getNodeType(int):int
// identifier maintenance
/** Registers an identifier name with a specified element node. */
public void putIdentifier(int nameIndex, int elementNodeIndex) {
if (DEBUG_IDS) {
System.out.println("putIdentifier("+nameIndex+", "+elementNodeIndex+')'+
" // "+
fStringPool.toString(nameIndex)+
", "+
fStringPool.toString(getChunkIndex(fNodeName, elementNodeIndex >> CHUNK_SHIFT, elementNodeIndex & CHUNK_MASK)));
}
// initialize arrays
if (fIdName == null) {
fIdName = new int[64];
fIdElement = new int[64];
}
// resize arrays
if (fIdCount == fIdName.length) {
int idName[] = new int[fIdCount * 2];
System.arraycopy(fIdName, 0, idName, 0, fIdCount);
fIdName = idName;
int idElement[] = new int[idName.length];
System.arraycopy(fIdElement, 0, idElement, 0, fIdCount);
fIdElement = idElement;
}
// store identifier
fIdName[fIdCount] = nameIndex;
fIdElement[fIdCount] = elementNodeIndex;
fIdCount++;
} // putIdentifier(int,int)
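/*
 * putIdentifier() only appends to two parallel arrays that double when full; a
 * sketch with a made-up name index:
 *
 *   putIdentifier(idNameIdx, 17);
 *   // fIdName[fIdCount - 1]    == idNameIdx
 *   // fIdElement[fIdCount - 1] == 17
 *
 * Assuming identifiers are registered in increasing element-index order (which
 * the binarySearch() contract requires), getNodeObject() can later find every
 * ID on a node with binarySearch(fIdElement, 0, fIdCount - 1, nodeIndex).
 */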
//
// DEBUG
//
/** Prints out the tables. */
public void print() {
if (DEBUG_PRINT_REF_COUNTS) {
System.out.print("num\t");
System.out.print("type\t");
System.out.print("name\t");
System.out.print("val\t");
System.out.print("par\t");
System.out.print("fch\t");
System.out.print("nsib");
System.out.println();
for (int i = 0; i < fNodeType.length; i++) {
if (fNodeType[i] != null) {
// separator
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.print("--------");
System.out.println();
// set count
System.out.print(i);
System.out.print('\t');
System.out.print(fNodeType[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeName[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeValue[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeParent[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodeLastChild[i][CHUNK_SIZE]);
System.out.print('\t');
System.out.print(fNodePrevSib[i][CHUNK_SIZE]);
System.out.println();
// clear count
System.out.print(i);
System.out.print('\t');
System.out.print(fNodeType[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeName[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeValue[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeParent[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodeLastChild[i][CHUNK_SIZE + 1]);
System.out.print('\t');
System.out.print(fNodePrevSib[i][CHUNK_SIZE + 1]);
System.out.println();
}
}
}
if (DEBUG_PRINT_TABLES) {
// This assumes that the document is small
System.out.println("# start table");
for (int i = 0; i < fNodeCount; i++) {
int chunk = i >> CHUNK_SHIFT;
int index = i & CHUNK_MASK;
if (i % 10 == 0) {
System.out.print("num\t");
System.out.print("type\t");
System.out.print("name\t");
System.out.print("val\t");
System.out.print("par\t");
System.out.print("fch\t");
System.out.print("nsib");
System.out.println();
}
System.out.print(i);
System.out.print('\t');
System.out.print(getChunkIndex(fNodeType, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeName, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeValue, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeParent, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodeLastChild, chunk, index));
System.out.print('\t');
System.out.print(getChunkIndex(fNodePrevSib, chunk, index));
/***
System.out.print(fNodeType[0][i]);
System.out.print('\t');
System.out.print(fNodeName[0][i]);
System.out.print('\t');
System.out.print(fNodeValue[0][i]);
System.out.print('\t');
System.out.print(fNodeParent[0][i]);
System.out.print('\t');
System.out.print(fNodeFirstChild[0][i]);
System.out.print('\t');
System.out.print(fNodeLastChild[0][i]);
System.out.print('\t');
System.out.print(fNodePrevSib[0][i]);
System.out.print('\t');
System.out.print(fNodeNextSib[0][i]);
/***/
System.out.println();
}
System.out.println("# end table");
}
} // print()
//
// DeferredNode methods
//
/** Returns the node index. */
public int getNodeIndex() {
return 0;
}
//
// Protected methods
//
/** access to string pool. */
protected StringPool getStringPool() {
return fStringPool;
}
/** Synchronizes the node's data. */
protected void synchronizeData() {
// no need to sync in the future
syncData(false);
// fluff up enough nodes to fill identifiers hash
if (fIdElement != null) {
// REVISIT: There has to be a more efficient way of
// doing this. But keep in mind that the
// tree can have been altered and re-ordered
// before all of the element nodes with ID
// attributes have been registered. For now
// this is reasonable and safe. -Ac
IntVector path = new IntVector();
for (int i = 0; i < fIdCount; i++) {
// ignore if it's already been registered
int elementNodeIndex = fIdElement[i];
int idNameIndex = fIdName[i];
if (idNameIndex == -1) {
continue;
}
// find path from this element to the root
path.removeAllElements();
int index = elementNodeIndex;
do {
path.addElement(index);
int pchunk = index >> CHUNK_SHIFT;
int pindex = index & CHUNK_MASK;
index = getChunkIndex(fNodeParent, pchunk, pindex);
} while (index != -1);
// Traverse path (backwards), fluffing the elements
// along the way. When this loop finishes, "place"
// will contain the reference to the element node
// we're interested in. -Ac
Node place = this;
for (int j = path.size() - 2; j >= 0; j--) {
index = path.elementAt(j);
Node child = place.getLastChild();
while (child != null) {
if (child instanceof DeferredNode) {
int nodeIndex = ((DeferredNode)child).getNodeIndex();
if (nodeIndex == index) {
place = child;
break;
}
}
child = child.getPreviousSibling();
}
}
// register the element
Element element = (Element)place;
String name = fStringPool.toString(idNameIndex);
putIdentifier0(name, element);
fIdName[i] = -1;
// see if there are more IDs on this element
while (i + 1 < fIdCount && fIdElement[i + 1] == elementNodeIndex) {
name = fStringPool.toString(fIdName[++i]);
putIdentifier0(name, element);
}
}
} // if identifiers
} // synchronizeData()
/**
* Synchronizes the node's children with the internal structure.
* Fluffing all of the children at once saves a lot of the work needed
* to keep the two structures in sync. The problem only gets worse as
* the tree is edited, so building the children eagerly makes that a lot easier.
*/
protected void synchronizeChildren() {
if (syncData()) {
synchronizeData();
}
// no need to sync in the future
syncChildren(false);
getNodeType(0);
// create children and link them as siblings
ChildNode first = null;
ChildNode last = null;
for (int index = getLastChild(0);
index != -1;
index = getPrevSibling(index)) {
ChildNode node = (ChildNode)getNodeObject(index);
if (last == null) {
last = node;
}
else {
first.previousSibling = node;
}
node.ownerNode = this;
node.owned(true);
node.nextSibling = first;
first = node;
// save doctype and document type
int type = node.getNodeType();
if (type == Node.ELEMENT_NODE) {
docElement = (ElementImpl)node;
}
else if (type == Node.DOCUMENT_TYPE_NODE) {
docType = (DocumentTypeImpl)node;
}
}
if (first != null) {
firstChild = first;
first.firstChild(true);
lastChild(last);
}
} // synchronizeChildren()
// utility methods
/** Ensures that the internal tables are large enough. */
protected boolean ensureCapacity(int chunk, int index) {
// create buffers
if (fNodeType == null) {
fNodeType = new int[INITIAL_CHUNK_COUNT][];
fNodeName = new int[INITIAL_CHUNK_COUNT][];
fNodeValue = new int[INITIAL_CHUNK_COUNT][];
fNodeParent = new int[INITIAL_CHUNK_COUNT][];
fNodeLastChild = new int[INITIAL_CHUNK_COUNT][];
fNodePrevSib = new int[INITIAL_CHUNK_COUNT][];
}
// return true if table is already big enough
try {
return fNodeType[chunk][index] != 0;
}
// resize the tables
catch (ArrayIndexOutOfBoundsException ex) {
int newsize = chunk * 2;
int[][] newArray = new int[newsize][];
System.arraycopy(fNodeType, 0, newArray, 0, chunk);
fNodeType = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeName, 0, newArray, 0, chunk);
fNodeName = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeValue, 0, newArray, 0, chunk);
fNodeValue = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeParent, 0, newArray, 0, chunk);
fNodeParent = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodeLastChild, 0, newArray, 0, chunk);
fNodeLastChild = newArray;
newArray = new int[newsize][];
System.arraycopy(fNodePrevSib, 0, newArray, 0, chunk);
fNodePrevSib = newArray;
}
catch (NullPointerException ex) {
// ignore
}
// create chunks
createChunk(fNodeType, chunk);
createChunk(fNodeName, chunk);
createChunk(fNodeValue, chunk);
createChunk(fNodeParent, chunk);
createChunk(fNodeLastChild, chunk);
createChunk(fNodePrevSib, chunk);
// success
return true;
} // ensureCapacity(int,int):boolean
/** Creates a node of the specified type. */
protected int createNode(short nodeType) {
// ensure tables are large enough
int chunk = fNodeCount >> CHUNK_SHIFT;
int index = fNodeCount & CHUNK_MASK;
ensureCapacity(chunk, index);
// initialize node
setChunkIndex(fNodeType, nodeType, chunk, index);
// return node index number
return fNodeCount++;
} // createNode(short):int
/**
* Performs a binary search for a target value in an array of
* values. The array of values must be in ascending sorted order
* before calling this method and all array values must be
* non-negative.
*
* @param values The array of values to search.
* @param start The starting offset of the search.
* @param end The ending offset of the search.
* @param target The target value.
*
* @return This function will return the <i>first</i> occurrence
* of the target value, or -1 if the target value cannot
* be found.
*/
protected static int binarySearch(final int values[],
int start, int end, int target) {
if (DEBUG_IDS) {
System.out.println("binarySearch(), target: "+target);
}
// look for target value
while (start <= end) {
// is this the one we're looking for?
int middle = (start + end) / 2;
int value = values[middle];
if (DEBUG_IDS) {
System.out.print(" value: "+value+", target: "+target+" // ");
print(values, start, end, middle, target);
}
if (value == target) {
while (middle > 0 && values[middle - 1] == target) {
middle--;
}
if (DEBUG_IDS) {
System.out.println("FOUND AT "+middle);
}
return middle;
}
// is this point higher or lower?
if (value > target) {
end = middle - 1;
}
else {
start = middle + 1;
}
} // while
// not found
if (DEBUG_IDS) {
System.out.println("NOT FOUND!");
}
return -1;
} // binarySearch(int[],int,int,int):int
//
// Private methods
//
/** Creates the specified chunk in the given array of chunks. */
private final void createChunk(int data[][], int chunk) {
data[chunk] = new int[CHUNK_SIZE + 2];
for (int i = 0; i < CHUNK_SIZE; i++) {
data[chunk][i] = -1;
}
}
/**
* Sets the specified value in the given array of data at the chunk and index.
*
* @return Returns the old value.
*/
private final int setChunkIndex(int data[][], int value, int chunk, int index) {
if (value == -1) {
return clearChunkIndex(data, chunk, index);
}
int ovalue = data[chunk][index];
if (ovalue == -1) {
data[chunk][CHUNK_SIZE]++;
}
data[chunk][index] = value;
return ovalue;
}
/**
* Returns the specified value in the given data at the chunk and index.
*/
private final int getChunkIndex(int data[][], int chunk, int index) {
return data[chunk] != null ? data[chunk][index] : -1;
}
/**
* Clears the specified value in the given data at the chunk and index.
* Note that this method will clear the given chunk if the reference
* count becomes zero.
*
* @return Returns the old value.
*/
private final int clearChunkIndex(int data[][], int chunk, int index) {
int value = data[chunk] != null ? data[chunk][index] : -1;
if (value != -1) {
data[chunk][CHUNK_SIZE + 1]++;
data[chunk][index] = -1;
if (data[chunk][CHUNK_SIZE] == data[chunk][CHUNK_SIZE + 1]) {
data[chunk] = null;
}
}
return value;
}
/**
* This version of putIdentifier is needed to avoid fluffing
* all of the paths to ID attributes when a node object is
* created that contains an ID attribute.
*/
private final void putIdentifier0(String idName, Element element) {
if (DEBUG_IDS) {
System.out.println("putIdentifier0("+
idName+", "+
element+')');
}
// create hashtable
if (identifiers == null) {
identifiers = new java.util.Hashtable();
}
// save ID and its associated element
identifiers.put(idName, element);
} // putIdentifier0(String,Element)
/** Prints the ID array. */
private static void print(int values[], int start, int end,
int middle, int target) {
if (DEBUG_IDS) {
System.out.print(start);
System.out.print(" [");
for (int i = start; i < end; i++) {
if (middle == i) {
System.out.print("!");
}
System.out.print(values[i]);
if (values[i] == target) {
System.out.print("*");
}
if (i < end - 1) {
System.out.print(" ");
}
}
System.out.println("] "+end);
}
} // print(int[],int,int,int,int)
//
// Classes
//
/**
* A simple integer vector.
*/
static class IntVector {
//
// Data
//
/** Data. */
private int data[];
/** Size. */
private int size;
//
// Public methods
//
/** Returns the length of this vector. */
public int size() {
return size;
}
/** Returns the element at the specified index. */
public int elementAt(int index) {
return data[index];
}
/** Appends an element to the end of the vector. */
public void addElement(int element) {
ensureCapacity(size + 1);
data[size++] = element;
}
/** Clears the vector. */
public void removeAllElements() {
size = 0;
}
//
// Private methods
//
/** Makes sure that there is enough storage. */
private void ensureCapacity(int newsize) {
if (data == null) {
data = new int[newsize + 15];
}
else if (newsize > data.length) {
int newdata[] = new int[newsize + 15];
System.arraycopy(data, 0, newdata, 0, data.length);
data = newdata;
}
} // ensureCapacity(int)
} // class IntVector
} // class DeferredDocumentImpl
| got rid of some unused code
git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@315435 13f79535-47bb-0310-9956-ffa450edef68
| src/org/apache/xerces/dom/DeferredDocumentImpl.java | got rid of some unused code | <ide><path>src/org/apache/xerces/dom/DeferredDocumentImpl.java
<ide> int chunk = childIndex >> CHUNK_SHIFT;
<ide> int index = childIndex & CHUNK_MASK;
<ide> setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
<del>
<del> int prev = childIndex;
<del> while (prev != -1) {
<del> childIndex = prev;
<del> prev = getChunkIndex(fNodePrevSib, chunk, index);
<del> chunk = prev >> CHUNK_SHIFT;
<del> index = prev & CHUNK_MASK;
<del> }
<del>
<ide> } // setAsLastChild(int,int)
<del>
<del> /** Sets the first child of the parentIndex to childIndex. */
<del> public void setAsFirstChild(int parentIndex, int childIndex) {
<del> /*
<del> int pchunk = parentIndex >> CHUNK_SHIFT;
<del> int pindex = parentIndex & CHUNK_MASK;
<del> int chunk = childIndex >> CHUNK_SHIFT;
<del> int index = childIndex & CHUNK_MASK;
<del> setChunkIndex(fNodeLastChild, childIndex, pchunk, pindex);
<del>
<del> int prev = childIndex;
<del> while (prev != -1) {
<del> childIndex = prev;
<del> prev = getChunkIndex(fNodePrevSib, chunk, index);
<del> chunk = prev >> CHUNK_SHIFT;
<del> index = prev & CHUNK_MASK;
<del> }
<del> */
<del> } // setAsLastChild(int,int)
<del>
<del> // methods used when objects are "fluffed-up"
<ide>
<ide> /**
<ide> * Returns the parent node of the given node.
<ide> : getChunkIndex(fNodePrevSib, chunk, index);
<ide>
<ide> } // getReadPrevSibling(int,boolean):int
<del>
<del> /**
<del> * Returns the first child of the given node at an EXTREME PRICE!!
<del> * @param free True to free child index.
<del> */
<del> public int getFirstChild(int nodeIndex, boolean free) {
<del> if (nodeIndex == -1) {
<del> return -1;
<del> }
<del> int index = getLastChild(nodeIndex, false);
<del> while (index != -1) {
<del> index = getPrevSibling(index, false);
<del> }
<del> return index;
<del> } // getLastChild(int,boolean):int
<del>
<del> /**
<del> * Returns the next sibling of the given node at an EXTREME PRICE!!
<del> * @param free True to free child index.
<del> */
<del> public int getNextSibling(int nodeIndex, boolean free) {
<del> if (nodeIndex == -1) {
<del> return -1;
<del> }
<del> int pindex = getParentNode(nodeIndex, false);
<del> int index = getLastChild(pindex, false);
<del> int next = -1;
<del> while (index != nodeIndex && index != -1) {
<del> next = index;
<del> index = getPrevSibling(next, false);
<del> }
<del> return next;
<del> } // getNextSibling(int,boolean):int
<ide>
<ide> /**
<ide> * Returns the index of the element definition in the table |
|
Java | apache-2.0 | dc838449a4e698c3aaf7725a9bdee35bd9195354 | 0 | jagguli/intellij-community,dslomov/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,amith01994/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,semonte/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,adedayo/intellij-community,caot/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,nicolargo/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,xfournet/intellij-community,petteyg/intellij-community,retomerz/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,xfournet/intellij-community,izonder/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,caot/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,ibinti/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,signed/intellij-community,apixandru/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,izonder/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,kdwink/intellij-community,allotria/intellij-community,FHannes/intellij-community,kdwink/intellij-community,ibinti/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,blademainer/intellij-community,FHannes/intellij-community,vladmm/intellij-community,retomerz/intellij-community,apixandru/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,slisson/intellij-community,allotria/intellij-
community,supersven/intellij-community,dslomov/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,vvv1559/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,hurricup/intellij-community,fitermay/intellij-community,diorcety/intellij-community,amith01994/intellij-community,FHannes/intellij-community,xfournet/intellij-community,supersven/intellij-community,vladmm/intellij-community,slisson/intellij-community,holmes/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,signed/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,tmpgit/intellij-community,semonte/intellij-community,diorcety/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,ibinti/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,kdwink/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,petteyg/intellij-community,caot/intellij-community,akosyakov/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,diorcety/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,da1z/intellij-community,holmes/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,izonder/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,clumsy/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,blademain
er/intellij-community,SerCeMan/intellij-community,samthor/intellij-community,asedunov/intellij-community,petteyg/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,izonder/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,caot/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,kool79/intellij-community,supersven/intellij-community,clumsy/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,xfournet/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,signed/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,robovm/robovm-studio,holmes/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,slisson/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,apixandru/intellij-community,semonte/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,caot/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,ryano144/intellij-community,da1z/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,kool79/intellij-community,vladmm/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,ryano144/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,semonte/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,diorcety/intellij-community,slisson/intellij-commun
ity,semonte/intellij-community,holmes/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,holmes/intellij-community,orekyuu/intellij-community,slisson/intellij-community,signed/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,slisson/intellij-community,caot/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,caot/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,petteyg/intellij-community,petteyg/intellij-community,kdwink/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,adedayo/intellij-community,kdwink/intellij-community,FHannes/intellij-community,allotria/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,samthor/intellij-community,ryano144/intellij-community,supersven/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,semonte/intellij-community,dslomov/intellij-community,apixandru/intellij-community,da1z/intellij-community,fitermay/intellij-community,signed/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,semonte/intellij-community,blademainer/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,robovm/robovm-studio,FHannes/intellij
-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,jagguli/intellij-community,robovm/robovm-studio,blademainer/intellij-community,ibinti/intellij-community,asedunov/intellij-community,jagguli/intellij-community,caot/intellij-community,ahb0327/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,apixandru/intellij-community,kdwink/intellij-community,adedayo/intellij-community,blademainer/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,hurricup/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,amith01994/intellij-community,robovm/robovm-studio,FHannes/intellij-community,supersven/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,allotria/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,signed/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,hurricup/intellij-community,slisson/intellij-community,apixandru/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,jagguli/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,holmes/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,ibinti/intellij-community,izonder/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,slisson/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,holmes/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,samthor/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,vladmm/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,ryano144/intellij-community,youdonghai/i
ntellij-community,diorcety/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,kool79/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,kool79/intellij-community,amith01994/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,caot/intellij-community,hurricup/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,FHannes/intellij-community,ibinti/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,vladmm/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,holmes/intellij-community,kdwink/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,signed/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,kool79/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,clumsy/intellij-community,samthor/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,ryano144/intellij-community,da1z/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,fnouama/intellij-community,clumsy/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,hurricup/intellij-community,jagguli/intellij-community,kool79/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,retomerz/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,dslomov/intellij-community,apixandru/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,fitermay/intellij-community,signed/intellij-community | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.ui.tree.nodes;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.ColoredTextContainer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.NotNullFunction;
import com.intellij.util.ObjectUtils;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.frame.XValueMarkers;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.intellij.xdebugger.impl.ui.tree.ValueMarkup;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.MouseEvent;
import java.util.Comparator;
/**
* @author nik
*/
public class XValueNodeImpl extends XValueContainerNode<XValue>
implements XValueNode, XCompositeNode, XValueNodePresentationConfigurator.ConfigurableXValueNode {
public static final Comparator<XValueNodeImpl> COMPARATOR = new Comparator<XValueNodeImpl>() {
@Override
public int compare(XValueNodeImpl o1, XValueNodeImpl o2) {
//noinspection ConstantConditions
return StringUtil.naturalCompare(o1.getName(), o2.getName());
}
};
private final String myName;
private String myType;
@Nullable
private String myValue;
private XFullValueEvaluator myFullValueEvaluator;
private boolean myChanged;
private XValuePresenter myValuePresenter;
public XValueNodeImpl(XDebuggerTree tree, XDebuggerTreeNode parent, String name, @NotNull XValue value) {
super(tree, parent, value);
myName = name;
value.computePresentation(this, XValuePlace.TREE);
// add "Collecting" message only if computation is not yet done
if (!isComputed()) {
if (myName != null) {
myText.append(myName, XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES);
myText.append(XDebuggerUIConstants.EQ_TEXT, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
myText.append(XDebuggerUIConstants.COLLECTING_DATA_MESSAGE, XDebuggerUIConstants.COLLECTING_DATA_HIGHLIGHT_ATTRIBUTES);
}
}
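  // Editor's note (illustrative, not part of the original source): while computePresentation() is still
  // running, the text assembled above renders roughly as "<name> = Collecting data..." (the name in
  // VALUE_NAME_ATTRIBUTES, the placeholder in COLLECTING_DATA_HIGHLIGHT_ATTRIBUTES); applyPresentation()
  // later calls updateText(), which clears this placeholder and rebuilds the final text.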
@Override
public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @Nullable String value, boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @NotNull String separator,
@NonNls @Nullable String value, boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, separator, value, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String value,
@Nullable NotNullFunction<String, String> valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, valuePresenter, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String value,
@Nullable XValuePresenter valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, valuePresenter, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String separator,
@NonNls @NotNull String value,
final @Nullable NotNullFunction<String, String> valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, separator, valuePresenter, hasChildren, this);
}
@Override
public void setGroupingPresentation(@Nullable Icon icon,
@NonNls @Nullable String value,
@Nullable XValuePresenter valuePresenter,
boolean expand) {
XValueNodePresentationConfigurator.setGroupingPresentation(icon, value, valuePresenter, expand, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String value,
@Nullable XValuePresenter valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, value, valuePresenter, hasChildren, this);
}
@Override
public void applyPresentation(@Nullable Icon icon,
@Nullable String type,
@Nullable String value,
@NotNull XValuePresenter valuePresenter,
boolean hasChildren,
boolean expand) {
setIcon(icon);
myValue = value;
myType = type;
myValuePresenter = valuePresenter;
updateText();
setLeaf(!hasChildren);
fireNodeChanged();
myTree.nodeLoaded(this, myName, value);
if (expand) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (!isObsolete()) {
myTree.expandPath(getPath());
}
}
});
}
}
@Override
public void setFullValueEvaluator(@NotNull final XFullValueEvaluator fullValueEvaluator) {
AppUIUtil.invokeOnEdt(new Runnable() {
@Override
public void run() {
myFullValueEvaluator = fullValueEvaluator;
fireNodeChanged();
}
});
}
private void updateText() {
myText.clear();
if (myTree.getSession() instanceof XDebugSessionImpl) {
XValueMarkers<?, ?> markers = ((XDebugSessionImpl)myTree.getSession()).getValueMarkers();
if (markers != null) {
ValueMarkup markup = markers.getMarkup(myValueContainer);
if (markup != null) {
myText.append("[" + markup.getText() + "] ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, markup.getColor()));
}
}
}
appendName();
buildText(myType, myValue, myValuePresenter, myText, myChanged);
}
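  // Editor's sketch (not part of the original file): the instanceof check in updateText() above is the
  // "Prevent CCE." change; sessions other than XDebugSessionImpl no longer cause a ClassCastException.
  // The same guard could be factored into a helper like this one. The method name is an assumption and
  // only types already imported in this file are used.
  @Nullable
  private XValueMarkers<?, ?> getValueMarkersIfAvailable() {
    if (myTree.getSession() instanceof XDebugSessionImpl) {
      return ((XDebugSessionImpl)myTree.getSession()).getValueMarkers();
    }
    // any other XDebugSession implementation simply has no value markers to show
    return null;
  }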
private void appendName() {
if (!StringUtil.isEmpty(myName)) {
StringValuePresenter.append(myName, myText,
ObjectUtils.notNull(myValuePresenter.getNameAttributes(), XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES),
MAX_VALUE_LENGTH, null);
}
}
public static void buildText(@Nullable String type,
@Nullable String value,
@NotNull XValuePresenter valuePresenter,
@NotNull ColoredTextContainer text,
boolean changed) {
if (value != null) {
valuePresenter.appendSeparator(text);
}
if (type != null) {
text.append("{" + type + "} ", XDebuggerUIConstants.TYPE_ATTRIBUTES);
}
if (value != null) {
valuePresenter.append(value, text, changed);
}
}
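  // Editor's note (worked example with assumed values): for type "String", value "\"abc\"" and a default
  // presenter, buildText() above appends the presenter's separator, then "{String} " in TYPE_ATTRIBUTES,
  // then the value itself (highlighted when 'changed' is true), i.e. something like: = {String} "abc"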
public void markChanged() {
if (myChanged) return;
ApplicationManager.getApplication().assertIsDispatchThread();
myChanged = true;
if (myName != null && myValue != null) {
updateText();
fireNodeChanged();
}
}
@Nullable
public XFullValueEvaluator getFullValueEvaluator() {
return myFullValueEvaluator;
}
@Nullable
@Override
protected XDebuggerTreeNodeHyperlink getLink() {
if (myFullValueEvaluator != null) {
return new XDebuggerTreeNodeHyperlink(myFullValueEvaluator.getLinkText()) {
@Override
public void onClick(MouseEvent event) {
DebuggerUIUtil.showValuePopup(myFullValueEvaluator, event, myTree.getProject());
}
};
}
return null;
}
@Nullable
public String getName() {
return myName;
}
@Nullable
public XValuePresenter getValuePresenter() {
return myValuePresenter;
}
@Nullable
public String getType() {
return myType;
}
@Nullable
public String getValue() {
return myValue;
}
public boolean isComputed() {
return myValuePresenter != null;
}
public void setValueModificationStarted() {
ApplicationManager.getApplication().assertIsDispatchThread();
myValue = null;
myText.clear();
appendName();
myValuePresenter.appendSeparator(myText);
myText.append(XDebuggerUIConstants.MODIFYING_VALUE_MESSAGE, XDebuggerUIConstants.MODIFYING_VALUE_HIGHLIGHT_ATTRIBUTES);
setLeaf(true);
fireNodeStructureChanged();
}
@Override
public String toString() {
return getName();
}
} | platform/xdebugger-impl/src/com/intellij/xdebugger/impl/ui/tree/nodes/XValueNodeImpl.java | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.ui.tree.nodes;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.ColoredTextContainer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.NotNullFunction;
import com.intellij.util.ObjectUtils;
import com.intellij.xdebugger.frame.*;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.frame.XValueMarkers;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import com.intellij.xdebugger.impl.ui.XDebuggerUIConstants;
import com.intellij.xdebugger.impl.ui.tree.ValueMarkup;
import com.intellij.xdebugger.impl.ui.tree.XDebuggerTree;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.MouseEvent;
import java.util.Comparator;
/**
* @author nik
*/
public class XValueNodeImpl extends XValueContainerNode<XValue> implements XValueNode, XCompositeNode, XValueNodePresentationConfigurator.ConfigurableXValueNode {
public static final Comparator<XValueNodeImpl> COMPARATOR = new Comparator<XValueNodeImpl>() {
@Override
public int compare(XValueNodeImpl o1, XValueNodeImpl o2) {
//noinspection ConstantConditions
return StringUtil.naturalCompare(o1.getName(), o2.getName());
}
};
private final String myName;
private String myType;
@Nullable
private String myValue;
private XFullValueEvaluator myFullValueEvaluator;
private boolean myChanged;
private XValuePresenter myValuePresenter;
public XValueNodeImpl(XDebuggerTree tree, XDebuggerTreeNode parent, String name, @NotNull XValue value) {
super(tree, parent, value);
myName = name;
value.computePresentation(this, XValuePlace.TREE);
// add "Collecting" message only if computation is not yet done
if (!isComputed()) {
if (myName != null) {
myText.append(myName, XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES);
myText.append(XDebuggerUIConstants.EQ_TEXT, SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
myText.append(XDebuggerUIConstants.COLLECTING_DATA_MESSAGE, XDebuggerUIConstants.COLLECTING_DATA_HIGHLIGHT_ATTRIBUTES);
}
}
@Override
public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @Nullable String value, boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon, @NonNls @Nullable String type, @NonNls @NotNull String separator,
@NonNls @Nullable String value, boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, separator, value, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String value,
@Nullable NotNullFunction<String, String> valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, valuePresenter, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String value,
@Nullable XValuePresenter valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, value, valuePresenter, hasChildren, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String type,
@NonNls @NotNull String separator,
@NonNls @NotNull String value,
final @Nullable NotNullFunction<String, String> valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, type, separator, valuePresenter, hasChildren, this);
}
@Override
public void setGroupingPresentation(@Nullable Icon icon,
@NonNls @Nullable String value,
@Nullable XValuePresenter valuePresenter,
boolean expand) {
XValueNodePresentationConfigurator.setGroupingPresentation(icon, value, valuePresenter, expand, this);
}
@Override
public void setPresentation(@Nullable Icon icon,
@NonNls @Nullable String value,
@Nullable XValuePresenter valuePresenter,
boolean hasChildren) {
XValueNodePresentationConfigurator.setPresentation(icon, value, valuePresenter, hasChildren, this);
}
@Override
public void applyPresentation(@Nullable Icon icon,
@Nullable String type,
@Nullable String value,
@NotNull XValuePresenter valuePresenter,
boolean hasChildren,
boolean expand) {
setIcon(icon);
myValue = value;
myType = type;
myValuePresenter = valuePresenter;
updateText();
setLeaf(!hasChildren);
fireNodeChanged();
myTree.nodeLoaded(this, myName, value);
if (expand) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (!isObsolete()) {
myTree.expandPath(getPath());
}
}
});
}
}
@Override
public void setFullValueEvaluator(@NotNull final XFullValueEvaluator fullValueEvaluator) {
AppUIUtil.invokeOnEdt(new Runnable() {
@Override
public void run() {
myFullValueEvaluator = fullValueEvaluator;
fireNodeChanged();
}
});
}
private void updateText() {
myText.clear();
XValueMarkers<?,?> markers = ((XDebugSessionImpl)myTree.getSession()).getValueMarkers();
if (markers != null) {
ValueMarkup markup = markers.getMarkup(myValueContainer);
if (markup != null) {
myText.append("[" + markup.getText() + "] ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, markup.getColor()));
}
}
appendName();
buildText(myType, myValue, myValuePresenter, myText, myChanged);
}
private void appendName() {
if (!StringUtil.isEmpty(myName)) {
StringValuePresenter.append(myName, myText,
ObjectUtils.notNull(myValuePresenter.getNameAttributes(), XDebuggerUIConstants.VALUE_NAME_ATTRIBUTES),
MAX_VALUE_LENGTH, null);
}
}
public static void buildText(@Nullable String type,
@Nullable String value,
@NotNull XValuePresenter valuePresenter,
@NotNull ColoredTextContainer text,
boolean changed) {
if (value != null) {
valuePresenter.appendSeparator(text);
}
if (type != null) {
text.append("{" + type + "} ", XDebuggerUIConstants.TYPE_ATTRIBUTES);
}
if (value != null) {
valuePresenter.append(value, text, changed);
}
}
public void markChanged() {
if (myChanged) return;
ApplicationManager.getApplication().assertIsDispatchThread();
myChanged = true;
if (myName != null && myValue != null) {
updateText();
fireNodeChanged();
}
}
@Nullable
public XFullValueEvaluator getFullValueEvaluator() {
return myFullValueEvaluator;
}
@Nullable
@Override
protected XDebuggerTreeNodeHyperlink getLink() {
if (myFullValueEvaluator != null) {
return new XDebuggerTreeNodeHyperlink(myFullValueEvaluator.getLinkText()) {
@Override
public void onClick(MouseEvent event) {
DebuggerUIUtil.showValuePopup(myFullValueEvaluator, event, myTree.getProject());
}
};
}
return null;
}
@Nullable
public String getName() {
return myName;
}
@Nullable
public XValuePresenter getValuePresenter() {
return myValuePresenter;
}
@Nullable
public String getType() {
return myType;
}
@Nullable
public String getValue() {
return myValue;
}
public boolean isComputed() {
return myValuePresenter != null;
}
public void setValueModificationStarted() {
ApplicationManager.getApplication().assertIsDispatchThread();
myValue = null;
myText.clear();
appendName();
myValuePresenter.appendSeparator(myText);
myText.append(XDebuggerUIConstants.MODIFYING_VALUE_MESSAGE, XDebuggerUIConstants.MODIFYING_VALUE_HIGHLIGHT_ATTRIBUTES);
setLeaf(true);
fireNodeStructureChanged();
}
@Override
public String toString() {
return getName();
}
} | Prevent CCE.
| platform/xdebugger-impl/src/com/intellij/xdebugger/impl/ui/tree/nodes/XValueNodeImpl.java | Prevent CCE. | <ide><path>latform/xdebugger-impl/src/com/intellij/xdebugger/impl/ui/tree/nodes/XValueNodeImpl.java
<ide> /**
<ide> * @author nik
<ide> */
<del>public class XValueNodeImpl extends XValueContainerNode<XValue> implements XValueNode, XCompositeNode, XValueNodePresentationConfigurator.ConfigurableXValueNode {
<add>public class XValueNodeImpl extends XValueContainerNode<XValue>
<add> implements XValueNode, XCompositeNode, XValueNodePresentationConfigurator.ConfigurableXValueNode {
<ide> public static final Comparator<XValueNodeImpl> COMPARATOR = new Comparator<XValueNodeImpl>() {
<ide> @Override
<ide> public int compare(XValueNodeImpl o1, XValueNodeImpl o2) {
<ide>
<ide> private void updateText() {
<ide> myText.clear();
<del> XValueMarkers<?,?> markers = ((XDebugSessionImpl)myTree.getSession()).getValueMarkers();
<del> if (markers != null) {
<del> ValueMarkup markup = markers.getMarkup(myValueContainer);
<del> if (markup != null) {
<del> myText.append("[" + markup.getText() + "] ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, markup.getColor()));
<add> if (myTree.getSession() instanceof XDebugSessionImpl) {
<add> XValueMarkers<?, ?> markers = ((XDebugSessionImpl)myTree.getSession()).getValueMarkers();
<add> if (markers != null) {
<add> ValueMarkup markup = markers.getMarkup(myValueContainer);
<add> if (markup != null) {
<add> myText.append("[" + markup.getText() + "] ", new SimpleTextAttributes(SimpleTextAttributes.STYLE_BOLD, markup.getColor()));
<add> }
<ide> }
<ide> }
<ide> appendName(); |
|
Java | agpl-3.0 | 4c307e97a167a0c84ac747b5eb602d1bc97aeda7 | 0 | PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,KinshipSoftware/KinOathKinshipArchiver | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package nl.mpi.kinnate.gedcomimport;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;
import javax.swing.JProgressBar;
import javax.swing.JTextArea;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import nl.mpi.arbil.GuiHelper;
import nl.mpi.arbil.LinorgSessionStorage;
import nl.mpi.arbil.clarin.CmdiComponentBuilder;
import nl.mpi.arbil.data.ImdiLoader;
import nl.mpi.arbil.data.ImdiTreeObject;
import nl.mpi.arbil.data.MetadataBuilder;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* Document : GedcomImporter
* Created on : Aug 24, 2010, 2:40:21 PM
* Author : Peter Withers
*/
public class GedcomImporter {
private int inputLineCount;
private String inputFileMd5Sim;
JProgressBar progressBar = null;
private void appendToTaskOutput(JTextArea importTextArea, String lineOfText) {
importTextArea.append(lineOfText + "\n");
importTextArea.setCaretPosition(importTextArea.getText().length());
}
public void setProgressBar(JProgressBar progressBarLocal) {
progressBar = progressBarLocal;
}
private void calculateFileNameAndFileLength(BufferedReader bufferedReader) {
// count the lines in the file (for progress) and calculate the md5 sum (for unique file naming)
try {
MessageDigest digest = MessageDigest.getInstance("MD5");
StringBuilder hexString = new StringBuilder();
String strLine;
inputLineCount = 0;
while ((strLine = bufferedReader.readLine()) != null) {
digest.update(strLine.getBytes());
inputLineCount++;
}
byte[] md5sum = digest.digest();
for (int byteCounter = 0; byteCounter < md5sum.length; ++byteCounter) {
hexString.append(Integer.toHexString(0x0100 + (md5sum[byteCounter] & 0x00FF)).substring(1));
}
inputFileMd5Sim = hexString.toString();
} catch (NoSuchAlgorithmException algorithmException) {
GuiHelper.linorgBugCatcher.logError(algorithmException);
} catch (IOException iOException) {
GuiHelper.linorgBugCatcher.logError(iOException);
}
}
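    // Editor's sketch, not part of the original class: the same MD5-plus-line-count pass as the method
    // above, but returning the hex digest instead of storing it in a field. The method name is an
    // assumption; only classes already imported by this file are used.
    private static String md5HexOfLines(BufferedReader reader) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("MD5");
        String line;
        while ((line = reader.readLine()) != null) {
            digest.update(line.getBytes());
        }
        StringBuilder hex = new StringBuilder();
        for (byte md5Byte : digest.digest()) {
            // zero-padded two-character hex, matching the loop in calculateFileNameAndFileLength()
            hex.append(Integer.toHexString(0x0100 + (md5Byte & 0x00FF)).substring(1));
        }
        return hex.toString();
    }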
public void importTestFile(JTextArea importTextArea, File testFile) {
try {
FileInputStream fstream = new FileInputStream(testFile);
DataInputStream in = new DataInputStream(fstream);
calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(in)));
importTestFile(importTextArea, new InputStreamReader(in));
} catch (FileNotFoundException exception) {
// todo: handle this
}
}
public void importTestFile(JTextArea importTextArea, String testFileString) {
calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(testFileString))));
importTestFile(importTextArea, new InputStreamReader(getClass().getResourceAsStream(testFileString)));
}
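    // Editor's note (hypothetical usage, the file path is invented for illustration): both overloads above
    // end up in the InputStreamReader-based import below, e.g.
    //   new GedcomImporter().importTestFile(outputTextArea, new File("/path/to/family.ged"));
    // where outputTextArea is the JTextArea that receives the progress/log lines.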
public void importTestFile(JTextArea importTextArea, InputStreamReader inputStreamReader) {
ArrayList<ImdiTreeObject> createdNodes = new ArrayList<ImdiTreeObject>();
Hashtable<String, String> createdNodesTable = new Hashtable<String, String>();
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
// ArrayList<ImdiTreeObject> linkNodes = new ArrayList<ImdiTreeObject>();
// really should close the file properly but this is only for testing at this stage
// URI targetFileURI = LinorgSessionStorage.getSingleInstance().getNewImdiFileName(LinorgSessionStorage.getSingleInstance().getCacheDirectory(), gedcomXsdLocation);
CmdiComponentBuilder componentBuilder = new CmdiComponentBuilder();
// try {
// targetFileURI = componentBuilder.createComponentFile(targetFileURI, this.getClass().getResource(gedcomXsdLocation).toURI(), false);
// } catch (URISyntaxException ex) {
// GuiHelper.linorgBugCatcher.logError(ex);
// return;
// }
MetadataBuilder metadataBuilder = new MetadataBuilder();
// metadataBuilder.addChildNode(gedcomImdiObject, ".Gedcom.Relation", null, null, null);
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.waitTillLoaded();
try {
String strLine;
int gedcomLevel = 0;
String xsdString = ""; // temp string to create the xsd
ArrayList<String> xsdTagsDone = new ArrayList<String>(); // temp array to create the xsd
ArrayList<String> gedcomLevelStrings = new ArrayList<String>();
ArrayList<String> xsdLevelStrings = new ArrayList<String>(); // temp array to create the xsd
ImdiTreeObject gedcomImdiObject = null;
Document metadataDom = null;
Element previousField = null;
Node currentDomNode = null;
String gedcomPreviousPath = "";
int currntLineCounter = 0;
while ((strLine = bufferedReader.readLine()) != null) {
String[] lineParts = strLine.split(" ", 3);
gedcomLevel = Integer.parseInt(lineParts[0]);
while (gedcomLevelStrings.size() > gedcomLevel) {
gedcomLevelStrings.remove(gedcomLevelStrings.size() - 1);
}
while (xsdLevelStrings.size() > gedcomLevel) {
xsdLevelStrings.remove(xsdLevelStrings.size() - 1);
xsdString += "</xs:sequence>\n</xs:complexType>\n</xs:element>\n";
//currentDomNode = currentDomNode.getParentNode();
}
gedcomLevelStrings.add(lineParts[1]);
System.out.println(strLine);
System.out.println("gedcomLevelString: " + gedcomLevelStrings);
appendToTaskOutput(importTextArea, strLine);
boolean lastFieldContinued = false;
if (lineParts[1].equals("CONT")) {
if (previousField != null) {
// todo: if the previous field is null this should be caught and handled as an error in the source file
previousField.setTextContent(previousField.getTextContent() + "\n" + lineParts[2]);
}
lastFieldContinued = true;
} else if (lineParts[1].equals("CONC")) {
if (previousField != null) {
// todo: if the previous field is null this should be caught and handled as an error in the source file
previousField.setTextContent(previousField.getTextContent() + lineParts[2]);
}
lastFieldContinued = true;
}
if (lastFieldContinued == false) {
previousField = null;
if (gedcomLevel == 0) {
// if (createdNodes.size() > 20) {
// appendToTaskOutput(importTextArea, "stopped import at node count: " + createdNodes.size());
// break;
// }
if (metadataDom != null) {
new CmdiComponentBuilder().savePrettyFormatting(metadataDom, gedcomImdiObject.getFile());
metadataDom = null;
}
if (lineParts[1].equals("TRLR")) {
appendToTaskOutput(importTextArea, "--> end of file found");
} else {
// String gedcomXsdLocation = "/xsd/gedcom-import.xsd";
String gedcomXsdLocation = "/xsd/gedcom-autogenerated.xsd";
URI eniryFileURI = LinorgSessionStorage.getSingleInstance().getNewImdiFileName(LinorgSessionStorage.getSingleInstance().getCacheDirectory(), gedcomXsdLocation);
try {
eniryFileURI = componentBuilder.createComponentFile(eniryFileURI, this.getClass().getResource(gedcomXsdLocation).toURI(), false);
} catch (URISyntaxException ex) {
GuiHelper.linorgBugCatcher.logError(ex);
appendToTaskOutput(importTextArea, "error: " + ex.getMessage());
return;
// } catch (org.apache.xmlbeans.XmlException ex) {
// GuiHelper.linorgBugCatcher.logError(ex);
// appendToTaskOutput(importTextArea, "error: " + ex.getMessage());
// return;
}
gedcomImdiObject = ImdiLoader.getSingleInstance().getImdiObject(null, eniryFileURI);
gedcomImdiObject.waitTillLoaded();
appendToTaskOutput(importTextArea, "--> InternalNameT1" + lineParts[1] + " : " + gedcomImdiObject.getUrlString());
createdNodesTable.put(lineParts[1], gedcomImdiObject.getUrlString());
createdNodes.add(gedcomImdiObject);
metadataDom = new CmdiComponentBuilder().getDocument(gedcomImdiObject.getURI());
currentDomNode = metadataDom.getDocumentElement();
// find the deepest element node to start adding child nodes to
for (Node childNode = currentDomNode.getFirstChild(); childNode != null; childNode = childNode.getNextSibling()) {
System.out.println("childNode: " + childNode);
System.out.println("childNodeType: " + childNode.getNodeType());
if (childNode.getNodeType() == Node.ELEMENT_NODE) {
System.out.println("entering node");
currentDomNode = childNode;
childNode = childNode.getFirstChild();
if (childNode == null) {
break;
}
}
}
if (lineParts[1].equals("HEAD")) {
// because the schema specifies 1:1 of both head and entity we find rather than create the head and entity nodes
Node headElement = currentDomNode;
// Element headElement = metadataDom.createElement("HEAD");
// currentDomNode.appendChild(headElement);
currentDomNode = headElement;
} else {
// because the schema specifies 1:1 of both head and entity we find rather than create the head and entity nodes
Node entityElement = null;
for (Node siblingNode = currentDomNode.getNextSibling(); siblingNode != null; siblingNode = siblingNode.getNextSibling()) {
if (siblingNode.getNodeName().equals("Entity")) {
entityElement = siblingNode;
break;
}
}
// Element entityElement = metadataDom.createElement("Entity");
// currentDomNode.appendChild(entityElement);
currentDomNode = entityElement;
// Element nameElement = metadataDom.createElement("NAME");
// currentDomNode.appendChild(nameElement);
System.out.println("currentDomElement: " + currentDomNode);
Node gedcomIdElement = null; // metadataDom.createElement("GedcomId");
Node gedcomTypeElement = null; // metadataDom.createElement("GedcomType");
// currentDomNode.appendChild(gedcomIdElement);
for (Node siblingNode = currentDomNode.getFirstChild(); siblingNode != null; siblingNode = siblingNode.getNextSibling()) {
if (siblingNode.getNodeName().equals("GedcomId")) {
gedcomIdElement = siblingNode;
}
if (siblingNode.getNodeName().equals("GedcomType")) {
gedcomTypeElement = siblingNode;
}
}
gedcomIdElement.setTextContent(lineParts[1]);
if (lineParts.length > 2) {
gedcomTypeElement.setTextContent(lineParts[2]);
// currentDomNode.appendChild(gedcomTypeElement);
if (lineParts[2].equals("NOTE")) {
Element addedNoteElement = metadataDom.createElement("NoteText");
currentDomNode.appendChild(addedNoteElement);
previousField = addedNoteElement;
}
}
}
System.out.println("currentDomElement: " + currentDomNode + " value: " + currentDomNode.getTextContent());
appendToTaskOutput(importTextArea, "--> new node started");
}
} else {
// if (lineParts.length > 2) {
// todo: move this into an array to be processed after all the fields have been inserted
//// gedcomImdiObject.saveChangesToCache(true);
// try {
// URI linkUri = metadataBuilder.addChildNode(gedcomImdiObject, ".Gedcom.Relation", null, null, null);
// ImdiTreeObject linkImdiObject = ImdiLoader.getSingleInstance().getImdiObject(null, linkUri);
// appendToTaskOutput(importTextArea, "--> gedcomImdiObject.getChildCount: " + gedcomImdiObject.getChildCount());
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.clearChildIcons();
// gedcomImdiObject.clearIcon();
//// gedcomImdiObject.waitTillLoaded();
// appendToTaskOutput(importTextArea, "--> link url: " + linkImdiObject.getUrlString());
//// appendToTaskOutput(importTextArea, "--> InternalNameT2" + lineParts[2] + " : " + linkImdiObject.getUrlString());
//// createdNodesTable.put(lineParts[2], linkImdiObject.getUrlString());
//// createdNodes.add(linkImdiObject.getUrlString());
//// System.out.println("keys: " + linkImdiObject.getFields().keys().nextElement());
// ImdiField[] currentField = linkImdiObject.getFields().get("Link");
// if (currentField != null && currentField.length > 0) {
// appendToTaskOutput(importTextArea, "--> Link" + lineParts[2]);
// // the target of this link might not be read in at this point so let's store the fields for updating later
// //createdNodesTable.get(lineParts[2])
// currentField[0].setFieldValue(lineParts[2], false, true);
// linkNodes.add(linkImdiObject);
//// appendToTaskOutput(importTextArea, "--> link count: " + linkFields.size());
// }
// ImdiField[] currentField1 = linkImdiObject.getFields().get("Type");
// if (currentField1 != null && currentField1.length > 0) {
// appendToTaskOutput(importTextArea, "--> Type" + lineParts[1]);
// currentField1[0].setFieldValue(lineParts[1], false, true);
// }
// ImdiField[] currentField2 = linkImdiObject.getFields().get("TargetName");
// if (currentField2 != null && currentField2.length > 0) {
// appendToTaskOutput(importTextArea, "--> TargetName" + lineParts[2]);
// currentField2[0].setFieldValue(lineParts[2], false, true);
// }
// } catch (ArbilMetadataException arbilMetadataException) {
// System.err.println(arbilMetadataException.getMessage());
// }
// }
// }
// }
// trim the nodes to the current gedcom level
int parentNodeCount = 0;
for (Node countingDomNode = currentDomNode; countingDomNode != null; countingDomNode = countingDomNode.getParentNode()) {
parentNodeCount++;
}
for (int nodeCount = parentNodeCount; nodeCount > gedcomLevel + 3; nodeCount--) {
System.out.println("gedcomLevel: " + gedcomLevel + " parentNodeCount: " + parentNodeCount + " nodeCount: " + nodeCount + " exiting from node: " + currentDomNode);
currentDomNode = currentDomNode.getParentNode();
}
if (lineParts[1].equals("NAME") && currentDomNode.getNodeName().equals("Entity")) {
// find the existing node if only one should exist
System.out.println("Found Name Node easching: " + currentDomNode.getNodeName());
for (Node childNode = currentDomNode.getFirstChild(); childNode != null; childNode = childNode.getNextSibling()) {
System.out.println(childNode.getNodeName());
if (childNode.getNodeName().equals("NAME")) {
System.out.println("Using found node");
currentDomNode = childNode;
break;
}
}
} else {
System.out.println("Creating Node: " + lineParts[1]);
// otherwise add the current gedcom node
Element addedElement = metadataDom.createElement(lineParts[1]);
currentDomNode.appendChild(addedElement);
currentDomNode = addedElement;
}
// if the current line has a value then enter it into the node
if (lineParts.length > 2) {
// if (lineParts[1].equals("NAME")) {
// ImdiField[] currentField = gedcomImdiObject.getFields().get("Gedcom.Name");
// if (currentField != null && currentField.length > 0) {
// currentField[0].setFieldValue(lineParts[2], false, true);
// previousField = currentField;
// } else {
// System.err.println("missing field for: " + lineParts[1]);
// previousField = null;
// }
// } else {
String gedcomPath = "Kinnate.Gedcom";
// int loopLevelCount = 0;
// int nodeLevelCount = 0;
// Node nodeLevelCountNode = currentDomNode;
// while (nodeLevelCountNode != null) {
// nodeLevelCountNode = nodeLevelCountNode.getParentNode();
// nodeLevelCount++;
// }
for (String levelString : gedcomLevelStrings) {
if (levelString.startsWith("@")) {
// this could be handled better
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
levelString = "Entity";
}
gedcomPath = gedcomPath + "." + levelString;
// loopLevelCount++;
// if (loopLevelCount > nodeLevelCount) {
// Element addedElement = metadataDom.createElement(levelString);
// currentDomNode.appendChild(addedElement);
// currentDomNode = addedElement;
// }
}
List<String> swapList = Arrays.asList(new String[]{
"Kinnate.Gedcom.HEAD.SOUR",
"Kinnate.Gedcom.HEAD.CORP",
"Kinnate.Gedcom.HEAD.CORP.ADDR",
"Kinnate.Gedcom.HEAD.SOUR.DATA",
"Kinnate.Gedcom.HEAD.CHAN.DATE",
"Kinnate.Gedcom.HEAD.DATE",
"Kinnate.Gedcom.HEAD.CHAR",
"Kinnate.Gedcom.Entity.NAME",
"Kinnate.Gedcom.Entity.REFN",
"Kinnate.Gedcom.Entity.REPO",
"Kinnate.Gedcom.Entity.DATA",
"Kinnate.Gedcom.Entity.ENGA",
"Kinnate.Gedcom.Entity.ENGA.SOUR",
"Kinnate.Gedcom.Entity.MARB",
"Kinnate.Gedcom.Entity.MARB.SOUR",
"Kinnate.Gedcom.Entity.MARC",
"Kinnate.Gedcom.Entity.MARC.SOUR",
"Kinnate.Gedcom.Entity.MARL",
"Kinnate.Gedcom.Entity.MARL.SOUR",
"Kinnate.Gedcom.Entity.MARS",
"Kinnate.Gedcom.Entity.MARS.SOUR",
"Kinnate.Gedcom.Entity.DIV",
"Kinnate.Gedcom.Entity.DIV.SOUR",
"Kinnate.Gedcom.Entity.DIVF",
"Kinnate.Gedcom.Entity.DIVF.SOUR",
"Kinnate.Gedcom.Entity.DATA.EVEN",
"Kinnate.Gedcom.Entity.REPO.CALN",
"Kinnate.Gedcom.Entity.NAME.SOUR",
"Kinnate.Gedcom.Entity.ADDR",
"Kinnate.Gedcom.Entity.CHAN.DATE",
"Kinnate.Gedcom.Entity.DEAT",
"Kinnate.Gedcom.Entity.OBJE",
"Kinnate.Gedcom.HEAD.SOUR.CORP",
"Kinnate.Gedcom.HEAD.SOUR.CORP.ADDR"});
Element addedExtraElement = null;
if (swapList.contains(gedcomPath)) {
gedcomPath += "." + lineParts[1];
addedExtraElement = metadataDom.createElement(lineParts[1]);
currentDomNode.appendChild(addedExtraElement);
currentDomNode = addedExtraElement;
}
currentDomNode.setTextContent(/*gedcomPath + " : " +*/lineParts[2]);
if (addedExtraElement != null) {
addedExtraElement = null;
currentDomNode = currentDomNode.getParentNode();
}
// currentDomNode = currentDomNode.getParentNode();
// System.out.println("is template: " + gedcomImdiObject.nodeTemplate.pathIsChildNode(gedcomPath));
if (!xsdTagsDone.contains(gedcomPath)) {
while (gedcomLevelStrings.size() > xsdLevelStrings.size() + 1) {
String xsdLevelString = gedcomLevelStrings.get(xsdLevelStrings.size());
if (xsdLevelString.startsWith("@")) {
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
xsdLevelString = "NamedElement";
}
xsdLevelStrings.add(xsdLevelString);
xsdString += " <xs:element name=\"" + xsdLevelString + "\">\n";
xsdString += "<xs:complexType>\n<xs:sequence>\n";
}
// while (gedcomLevelStrings.size() < xsdLevelStrings.size()) {
// xsdLevelStrings.remove(xsdLevelStrings.size() - 1);
// xsdString += "</xs:sequence>\n</xs:complexType>\n";
// }
String xsdElementString = lineParts[1];
if (xsdElementString.startsWith("@")) {
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
xsdElementString = "NamedElement";
}
xsdString += " <xs:element name=\"" + xsdElementString + "\" />\n";// + gedcomPath + "\n" + strLine + "\n";
xsdTagsDone.add(gedcomPath);
}
// create the link node when required
if (lineParts[2].startsWith("@") && lineParts[2].endsWith("@")) {
appendToTaskOutput(importTextArea, "--> adding link");
Element relationElement = metadataDom.createElement("Relation");
metadataDom.getDocumentElement().appendChild(relationElement);
Element linkElement = metadataDom.createElement("Link");
linkElement.setTextContent(lineParts[2]);
relationElement.appendChild(linkElement);
Element typeElement = metadataDom.createElement("Type");
typeElement.setTextContent(gedcomPath);
relationElement.appendChild(typeElement);
Element targetNameElement = metadataDom.createElement("TargetName");
targetNameElement.setTextContent(lineParts[2]);
relationElement.appendChild(targetNameElement);
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getTextContent());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getNodeName());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getParentNode().getNodeName());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getParentNode().getParentNode().getNodeName());
}
}
}
}
currntLineCounter++;
int currentProgressPercent = (int) ((double) currntLineCounter / (double) inputLineCount * 100);
if (progressBar != null) {
progressBar.setValue(currentProgressPercent / 2);
}
}
if (metadataDom != null) {
new CmdiComponentBuilder().savePrettyFormatting(metadataDom, gedcomImdiObject.getFile());
metadataDom = null;
}
// ImdiLoader.getSingleInstance().saveNodesNeedingSave(true);
// appendToTaskOutput(importTextArea, "--> link count: " + linkFields.size());
// update all the links now we have the urls for each internal name
appendToTaskOutput(importTextArea, "xsdString:\n" + xsdString);
int linkNodesUpdated = 0;
for (ImdiTreeObject currentImdiObject : createdNodes) {
appendToTaskOutput(importTextArea, "linkParent: " + currentImdiObject.getUrlString());
try {
String linkXpath = "/Kinnate/Relation/Link";
Document linksDom = new CmdiComponentBuilder().getDocument(currentImdiObject.getURI());
NodeList relationLinkNodeList = org.apache.xpath.XPathAPI.selectNodeList(linksDom, linkXpath);
for (int nodeCounter = 0; nodeCounter < relationLinkNodeList.getLength(); nodeCounter++) {
Node relationLinkNode = relationLinkNodeList.item(nodeCounter);
if (relationLinkNode != null) {
// todo: update the links
// todo: create links in ego and alter but put the type info such as famc only in the relevant entity
String linkValue = createdNodesTable.get(relationLinkNode.getTextContent());
if (linkValue != null) {
relationLinkNode.setTextContent(linkValue);
appendToTaskOutput(importTextArea, "linkValue: " + linkValue);
}
}
}
new CmdiComponentBuilder().savePrettyFormatting(linksDom, currentImdiObject.getFile());
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
linkNodesUpdated++;
if (progressBar != null) {
progressBar.setValue((int) ((double) linkNodesUpdated / (double) createdNodes.size() * 100 / 2 + 50));
}
}
appendToTaskOutput(importTextArea, "import finished with a node count of: " + createdNodes.size());
// gedcomImdiObject.saveChangesToCache(true);
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.clearChildIcons();
// gedcomImdiObject.clearIcon();
ImdiLoader.getSingleInstance().saveNodesNeedingSave(true);
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
appendToTaskOutput(importTextArea, "error: " + exception.getMessage());
} catch (ParserConfigurationException parserConfigurationException) {
GuiHelper.linorgBugCatcher.logError(parserConfigurationException);
appendToTaskOutput(importTextArea, "error: " + parserConfigurationException.getMessage());
} catch (DOMException dOMException) {
GuiHelper.linorgBugCatcher.logError(dOMException);
appendToTaskOutput(importTextArea, "error: " + dOMException.getMessage());
} catch (SAXException sAXException) {
GuiHelper.linorgBugCatcher.logError(sAXException);
appendToTaskOutput(importTextArea, "error: " + sAXException.getMessage());
}
// LinorgSessionStorage.getSingleInstance().loadStringArray("KinGraphTree");
String[] createdNodePaths = new String[createdNodes.size()];
int createdNodeCounter = 0;
for (ImdiTreeObject currentImdiObject : createdNodes) {
createdNodePaths[createdNodeCounter] = currentImdiObject.getUrlString();
createdNodeCounter++;
}
LinorgSessionStorage.getSingleInstance().saveStringArray("KinGraphTree", createdNodePaths);
}
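    // Editor's recap (sketch; the names mirror the code above, nothing new is introduced): the import is
    // effectively two passes. Pass one writes one metadata file per GEDCOM record and fills
    // createdNodesTable, mapping internal names such as "@I9@" to the generated file URLs. Pass two
    // reopens every created file and rewrites each /Kinnate/Relation/Link element from the internal
    // name to that URL:
    //   String target = createdNodesTable.get(relationLinkNode.getTextContent());
    //   if (target != null) {
    //       relationLinkNode.setTextContent(target);
    //   }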
}
| src/main/java/nl/mpi/kinnate/gedcomimport/GedcomImporter.java | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package nl.mpi.kinnate.gedcomimport;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;
import javax.swing.JProgressBar;
import javax.swing.JTextArea;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import nl.mpi.arbil.GuiHelper;
import nl.mpi.arbil.LinorgSessionStorage;
import nl.mpi.arbil.clarin.CmdiComponentBuilder;
import nl.mpi.arbil.data.ImdiLoader;
import nl.mpi.arbil.data.ImdiTreeObject;
import nl.mpi.arbil.data.MetadataBuilder;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* Document : GedcomImporter
* Created on : Aug 24, 2010, 2:40:21 PM
* Author : Peter Withers
*/
public class GedcomImporter {
private int inputLineCounter;
private int currntLineCounter;
private int currentProgressPercent = 0;
private String inputFileMd5Sim;
JProgressBar progressBar = null;
private void appendToTaskOutput(JTextArea importTextArea, String lineOfText) {
importTextArea.append(lineOfText + "\n");
importTextArea.setCaretPosition(importTextArea.getText().length());
}
public int getProgress() {
return currentProgressPercent; // return percent of progress
}
public void setProgressBar(JProgressBar progressBarLocal) {
progressBar = progressBarLocal;
}
private void calculateFileNameAndFileLength(BufferedReader bufferedReader) {
// count the lines in the file (for progress) and calculate the md5 sum (for unique file naming)
try {
MessageDigest digest = MessageDigest.getInstance("MD5");
StringBuilder hexString = new StringBuilder();
String strLine;
inputLineCounter = 0;
currntLineCounter = 0;
while ((strLine = bufferedReader.readLine()) != null) {
digest.update(strLine.getBytes());
inputLineCounter++;
}
byte[] md5sum = digest.digest();
for (int byteCounter = 0; byteCounter < md5sum.length; ++byteCounter) {
hexString.append(Integer.toHexString(0x0100 + (md5sum[byteCounter] & 0x00FF)).substring(1));
}
inputFileMd5Sim = hexString.toString();
} catch (NoSuchAlgorithmException algorithmException) {
GuiHelper.linorgBugCatcher.logError(algorithmException);
} catch (IOException iOException) {
GuiHelper.linorgBugCatcher.logError(iOException);
}
}
public void importTestFile(JTextArea importTextArea, File testFile) {
try {
FileInputStream fstream = new FileInputStream(testFile);
DataInputStream in = new DataInputStream(fstream);
calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(in)));
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
importTestFile(importTextArea, bufferedReader);
} catch (FileNotFoundException exception) {
// todo: handle this
}
}
public void importTestFile(JTextArea importTextArea, String testFileString) {
calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(testFileString))));
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(testFileString)));
importTestFile(importTextArea, bufferedReader);
}
public void importTestFile(JTextArea importTextArea, BufferedReader bufferedReader) {
ArrayList<ImdiTreeObject> createdNodes = new ArrayList<ImdiTreeObject>();
Hashtable<String, String> createdNodesTable = new Hashtable<String, String>();
// ArrayList<ImdiTreeObject> linkNodes = new ArrayList<ImdiTreeObject>();
// really should close the file properly but this is only for testing at this stage
// URI targetFileURI = LinorgSessionStorage.getSingleInstance().getNewImdiFileName(LinorgSessionStorage.getSingleInstance().getCacheDirectory(), gedcomXsdLocation);
CmdiComponentBuilder componentBuilder = new CmdiComponentBuilder();
// try {
// targetFileURI = componentBuilder.createComponentFile(targetFileURI, this.getClass().getResource(gedcomXsdLocation).toURI(), false);
// } catch (URISyntaxException ex) {
// GuiHelper.linorgBugCatcher.logError(ex);
// return;
// }
MetadataBuilder metadataBuilder = new MetadataBuilder();
// metadataBuilder.addChildNode(gedcomImdiObject, ".Gedcom.Relation", null, null, null);
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.waitTillLoaded();
try {
String strLine;
int gedcomLevel = 0;
String xsdString = ""; // temp string to create the xsd
ArrayList<String> xsdTagsDone = new ArrayList<String>(); // temp array to create the xsd
ArrayList<String> gedcomLevelStrings = new ArrayList<String>();
ArrayList<String> xsdLevelStrings = new ArrayList<String>(); // temp array to create the xsd
ImdiTreeObject gedcomImdiObject = null;
Document metadataDom = null;
Element previousField = null;
Node currentDomNode = null;
String gedcomPreviousPath = "";
while ((strLine = bufferedReader.readLine()) != null) {
String[] lineParts = strLine.split(" ", 3);
gedcomLevel = Integer.parseInt(lineParts[0]);
while (gedcomLevelStrings.size() > gedcomLevel) {
gedcomLevelStrings.remove(gedcomLevelStrings.size() - 1);
}
while (xsdLevelStrings.size() > gedcomLevel) {
xsdLevelStrings.remove(xsdLevelStrings.size() - 1);
xsdString += "</xs:sequence>\n</xs:complexType>\n</xs:element>\n";
//currentDomNode = currentDomNode.getParentNode();
}
gedcomLevelStrings.add(lineParts[1]);
System.out.println(strLine);
System.out.println("gedcomLevelString: " + gedcomLevelStrings);
appendToTaskOutput(importTextArea, strLine);
boolean lastFieldContinued = false;
if (lineParts[1].equals("CONT")) {
if (previousField != null) {
// todo: if the previous field is null this should be caught and handled as an error in the source file
previousField.setTextContent(previousField.getTextContent() + "\n" + lineParts[2]);
}
lastFieldContinued = true;
} else if (lineParts[1].equals("CONC")) {
if (previousField != null) {
// todo: if the previous field is null this should be caught and handled as an error in the source file
previousField.setTextContent(previousField.getTextContent() + lineParts[2]);
}
lastFieldContinued = true;
}
if (lastFieldContinued == false) {
previousField = null;
if (gedcomLevel == 0) {
// if (createdNodes.size() > 20) {
// appendToTaskOutput(importTextArea, "stopped import at node count: " + createdNodes.size());
// break;
// }
if (metadataDom != null) {
new CmdiComponentBuilder().savePrettyFormatting(metadataDom, gedcomImdiObject.getFile());
metadataDom = null;
}
if (lineParts[1].equals("TRLR")) {
appendToTaskOutput(importTextArea, "--> end of file found");
} else {
// String gedcomXsdLocation = "/xsd/gedcom-import.xsd";
String gedcomXsdLocation = "/xsd/gedcom-autogenerated.xsd";
URI eniryFileURI = LinorgSessionStorage.getSingleInstance().getNewImdiFileName(LinorgSessionStorage.getSingleInstance().getCacheDirectory(), gedcomXsdLocation);
try {
eniryFileURI = componentBuilder.createComponentFile(eniryFileURI, this.getClass().getResource(gedcomXsdLocation).toURI(), false);
} catch (URISyntaxException ex) {
GuiHelper.linorgBugCatcher.logError(ex);
appendToTaskOutput(importTextArea, "error: " + ex.getMessage());
return;
// } catch (org.apache.xmlbeans.XmlException ex) {
// GuiHelper.linorgBugCatcher.logError(ex);
// appendToTaskOutput(importTextArea, "error: " + ex.getMessage());
// return;
}
gedcomImdiObject = ImdiLoader.getSingleInstance().getImdiObject(null, eniryFileURI);
gedcomImdiObject.waitTillLoaded();
appendToTaskOutput(importTextArea, "--> InternalNameT1" + lineParts[1] + " : " + gedcomImdiObject.getUrlString());
createdNodesTable.put(lineParts[1], gedcomImdiObject.getUrlString());
createdNodes.add(gedcomImdiObject);
metadataDom = new CmdiComponentBuilder().getDocument(gedcomImdiObject.getURI());
currentDomNode = metadataDom.getDocumentElement();
// find the deepest element node to start adding child nodes to
for (Node childNode = currentDomNode.getFirstChild(); childNode != null; childNode = childNode.getNextSibling()) {
System.out.println("childNode: " + childNode);
System.out.println("childNodeType: " + childNode.getNodeType());
if (childNode.getNodeType() == Node.ELEMENT_NODE) {
System.out.println("entering node");
currentDomNode = childNode;
childNode = childNode.getFirstChild();
if (childNode == null) {
break;
}
}
}
if (lineParts[1].equals("HEAD")) {
// because the schema specifies 1:1 of both head and entity we find rather than create the head and entity nodes
Node headElement = currentDomNode;
// Element headElement = metadataDom.createElement("HEAD");
// currentDomNode.appendChild(headElement);
currentDomNode = headElement;
} else {
// because the schema specifies 1:1 of both head and entity we find rather than create the head and entity nodes
Node entityElement = null;
for (Node siblingNode = currentDomNode.getNextSibling(); siblingNode != null; siblingNode = siblingNode.getNextSibling()) {
if (siblingNode.getNodeName().equals("Entity")) {
entityElement = siblingNode;
break;
}
}
// Element entityElement = metadataDom.createElement("Entity");
// currentDomNode.appendChild(entityElement);
currentDomNode = entityElement;
// Element nameElement = metadataDom.createElement("NAME");
// currentDomNode.appendChild(nameElement);
System.out.println("currentDomElement: " + currentDomNode);
Node gedcomIdElement = null; // metadataDom.createElement("GedcomId");
Node gedcomTypeElement = null; // metadataDom.createElement("GedcomType");
// currentDomNode.appendChild(gedcomIdElement);
for (Node siblingNode = currentDomNode.getFirstChild(); siblingNode != null; siblingNode = siblingNode.getNextSibling()) {
if (siblingNode.getNodeName().equals("GedcomId")) {
gedcomIdElement = siblingNode;
}
if (siblingNode.getNodeName().equals("GedcomType")) {
gedcomTypeElement = siblingNode;
}
}
gedcomIdElement.setTextContent(lineParts[1]);
if (lineParts.length > 2) {
gedcomTypeElement.setTextContent(lineParts[2]);
// currentDomNode.appendChild(gedcomTypeElement);
if (lineParts[2].equals("NOTE")) {
Element addedNoteElement = metadataDom.createElement("NoteText");
currentDomNode.appendChild(addedNoteElement);
previousField = addedNoteElement;
}
}
}
System.out.println("currentDomElement: " + currentDomNode + " value: " + currentDomNode.getTextContent());
appendToTaskOutput(importTextArea, "--> new node started");
}
} else {
// if (lineParts.length > 2) {
//                            // todo: move this into an array to be processed after all the fields have been inserted
//// gedcomImdiObject.saveChangesToCache(true);
// try {
// URI linkUri = metadataBuilder.addChildNode(gedcomImdiObject, ".Gedcom.Relation", null, null, null);
// ImdiTreeObject linkImdiObject = ImdiLoader.getSingleInstance().getImdiObject(null, linkUri);
// appendToTaskOutput(importTextArea, "--> gedcomImdiObject.getChildCount: " + gedcomImdiObject.getChildCount());
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.clearChildIcons();
// gedcomImdiObject.clearIcon();
//// gedcomImdiObject.waitTillLoaded();
// appendToTaskOutput(importTextArea, "--> link url: " + linkImdiObject.getUrlString());
//// appendToTaskOutput(importTextArea, "--> InternalNameT2" + lineParts[2] + " : " + linkImdiObject.getUrlString());
//// createdNodesTable.put(lineParts[2], linkImdiObject.getUrlString());
//// createdNodes.add(linkImdiObject.getUrlString());
//// System.out.println("keys: " + linkImdiObject.getFields().keys().nextElement());
// ImdiField[] currentField = linkImdiObject.getFields().get("Link");
// if (currentField != null && currentField.length > 0) {
// appendToTaskOutput(importTextArea, "--> Link" + lineParts[2]);
//                                // the target of this link might not be read in at this point so let's store the fields for updating later
// //createdNodesTable.get(lineParts[2])
// currentField[0].setFieldValue(lineParts[2], false, true);
// linkNodes.add(linkImdiObject);
//// appendToTaskOutput(importTextArea, "--> link count: " + linkFields.size());
// }
// ImdiField[] currentField1 = linkImdiObject.getFields().get("Type");
// if (currentField1 != null && currentField1.length > 0) {
// appendToTaskOutput(importTextArea, "--> Type" + lineParts[1]);
// currentField1[0].setFieldValue(lineParts[1], false, true);
// }
// ImdiField[] currentField2 = linkImdiObject.getFields().get("TargetName");
// if (currentField2 != null && currentField2.length > 0) {
// appendToTaskOutput(importTextArea, "--> TargetName" + lineParts[2]);
// currentField2[0].setFieldValue(lineParts[2], false, true);
// }
// } catch (ArbilMetadataException arbilMetadataException) {
// System.err.println(arbilMetadataException.getMessage());
// }
// }
// }
// }
// trim the nodes to the current gedcom level
int parentNodeCount = 0;
for (Node countingDomNode = currentDomNode; countingDomNode != null; countingDomNode = countingDomNode.getParentNode()) {
parentNodeCount++;
}
for (int nodeCount = parentNodeCount; nodeCount > gedcomLevel + 3; nodeCount--) {
System.out.println("gedcomLevel: " + gedcomLevel + " parentNodeCount: " + parentNodeCount + " nodeCount: " + nodeCount + " exiting from node: " + currentDomNode);
currentDomNode = currentDomNode.getParentNode();
}
if (lineParts[1].equals("NAME") && currentDomNode.getNodeName().equals("Entity")) {
// find the existing node if only one should exist
                                System.out.println("Found Name Node, searching: " + currentDomNode.getNodeName());
for (Node childNode = currentDomNode.getFirstChild(); childNode != null; childNode = childNode.getNextSibling()) {
System.out.println(childNode.getNodeName());
if (childNode.getNodeName().equals("NAME")) {
System.out.println("Using found node");
currentDomNode = childNode;
break;
}
}
} else {
System.out.println("Creating Node: " + lineParts[1]);
// otherwise add the current gedcom node
Element addedElement = metadataDom.createElement(lineParts[1]);
currentDomNode.appendChild(addedElement);
currentDomNode = addedElement;
}
// if the current line has a value then enter it into the node
if (lineParts.length > 2) {
// if (lineParts[1].equals("NAME")) {
// ImdiField[] currentField = gedcomImdiObject.getFields().get("Gedcom.Name");
// if (currentField != null && currentField.length > 0) {
// currentField[0].setFieldValue(lineParts[2], false, true);
// previousField = currentField;
// } else {
// System.err.println("missing field for: " + lineParts[1]);
// previousField = null;
// }
// } else {
String gedcomPath = "Kinnate.Gedcom";
// int loopLevelCount = 0;
// int nodeLevelCount = 0;
// Node nodeLevelCountNode = currentDomNode;
// while (nodeLevelCountNode != null) {
// nodeLevelCountNode = nodeLevelCountNode.getParentNode();
// nodeLevelCount++;
// }
for (String levelString : gedcomLevelStrings) {
if (levelString.startsWith("@")) {
// this could be handled better
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
levelString = "Entity";
}
gedcomPath = gedcomPath + "." + levelString;
// loopLevelCount++;
// if (loopLevelCount > nodeLevelCount) {
// Element addedElement = metadataDom.createElement(levelString);
// currentDomNode.appendChild(addedElement);
// currentDomNode = addedElement;
// }
}
List<String> swapList = Arrays.asList(new String[]{
"Kinnate.Gedcom.HEAD.SOUR",
"Kinnate.Gedcom.HEAD.CORP",
"Kinnate.Gedcom.HEAD.CORP.ADDR",
"Kinnate.Gedcom.HEAD.SOUR.DATA",
"Kinnate.Gedcom.HEAD.CHAN.DATE",
"Kinnate.Gedcom.HEAD.DATE",
"Kinnate.Gedcom.HEAD.CHAR",
"Kinnate.Gedcom.Entity.NAME",
"Kinnate.Gedcom.Entity.REFN",
"Kinnate.Gedcom.Entity.REPO",
"Kinnate.Gedcom.Entity.DATA",
"Kinnate.Gedcom.Entity.ENGA",
"Kinnate.Gedcom.Entity.ENGA.SOUR",
"Kinnate.Gedcom.Entity.MARB",
"Kinnate.Gedcom.Entity.MARB.SOUR",
"Kinnate.Gedcom.Entity.MARC",
"Kinnate.Gedcom.Entity.MARC.SOUR",
"Kinnate.Gedcom.Entity.MARL",
"Kinnate.Gedcom.Entity.MARL.SOUR",
"Kinnate.Gedcom.Entity.MARS",
"Kinnate.Gedcom.Entity.MARS.SOUR",
"Kinnate.Gedcom.Entity.DIV",
"Kinnate.Gedcom.Entity.DIV.SOUR",
"Kinnate.Gedcom.Entity.DIVF",
"Kinnate.Gedcom.Entity.DIVF.SOUR",
"Kinnate.Gedcom.Entity.DATA.EVEN",
"Kinnate.Gedcom.Entity.REPO.CALN",
"Kinnate.Gedcom.Entity.NAME.SOUR",
"Kinnate.Gedcom.Entity.ADDR",
"Kinnate.Gedcom.Entity.CHAN.DATE",
"Kinnate.Gedcom.Entity.DEAT",
"Kinnate.Gedcom.Entity.OBJE",
"Kinnate.Gedcom.HEAD.SOUR.CORP",
"Kinnate.Gedcom.HEAD.SOUR.CORP.ADDR"});
Element addedExtraElement = null;
if (swapList.contains(gedcomPath)) {
gedcomPath += "." + lineParts[1];
addedExtraElement = metadataDom.createElement(lineParts[1]);
currentDomNode.appendChild(addedExtraElement);
currentDomNode = addedExtraElement;
}
currentDomNode.setTextContent(/*gedcomPath + " : " +*/lineParts[2]);
if (addedExtraElement != null) {
addedExtraElement = null;
currentDomNode = currentDomNode.getParentNode();
}
// currentDomNode = currentDomNode.getParentNode();
// System.out.println("is template: " + gedcomImdiObject.nodeTemplate.pathIsChildNode(gedcomPath));
if (!xsdTagsDone.contains(gedcomPath)) {
while (gedcomLevelStrings.size() > xsdLevelStrings.size() + 1) {
String xsdLevelString = gedcomLevelStrings.get(xsdLevelStrings.size());
if (xsdLevelString.startsWith("@")) {
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
xsdLevelString = "NamedElement";
}
xsdLevelStrings.add(xsdLevelString);
xsdString += " <xs:element name=\"" + xsdLevelString + "\">\n";
xsdString += "<xs:complexType>\n<xs:sequence>\n";
}
// while (gedcomLevelStrings.size() < xsdLevelStrings.size()) {
// xsdLevelStrings.remove(xsdLevelStrings.size() - 1);
// xsdString += "</xs:sequence>\n</xs:complexType>\n";
// }
String xsdElementString = lineParts[1];
if (xsdElementString.startsWith("@")) {
// this occurs at level 0 where the element type is named eg "0 @I9@ INDI"
xsdElementString = "NamedElement";
}
xsdString += " <xs:element name=\"" + xsdElementString + "\" />\n";// + gedcomPath + "\n" + strLine + "\n";
xsdTagsDone.add(gedcomPath);
}
// create the link node when required
if (lineParts[2].startsWith("@") && lineParts[2].endsWith("@")) {
appendToTaskOutput(importTextArea, "--> adding link");
Element relationElement = metadataDom.createElement("Relation");
metadataDom.getDocumentElement().appendChild(relationElement);
Element linkElement = metadataDom.createElement("Link");
linkElement.setTextContent(lineParts[2]);
relationElement.appendChild(linkElement);
Element typeElement = metadataDom.createElement("Type");
typeElement.setTextContent(gedcomPath);
relationElement.appendChild(typeElement);
Element targetNameElement = metadataDom.createElement("TargetName");
targetNameElement.setTextContent(lineParts[2]);
relationElement.appendChild(targetNameElement);
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getTextContent());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getNodeName());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getParentNode().getNodeName());
// appendToTaskOutput(importTextArea, "--> typeElement: " + typeElement.getParentNode().getParentNode().getNodeName());
}
}
}
}
currntLineCounter++;
currentProgressPercent = (int) ((double) currntLineCounter / (double) inputLineCounter * 100);
if (progressBar != null) {
progressBar.setValue(currentProgressPercent / 2);
}
}
if (metadataDom != null) {
new CmdiComponentBuilder().savePrettyFormatting(metadataDom, gedcomImdiObject.getFile());
metadataDom = null;
}
// ImdiLoader.getSingleInstance().saveNodesNeedingSave(true);
// appendToTaskOutput(importTextArea, "--> link count: " + linkFields.size());
// update all the links now we have the urls for each internal name
appendToTaskOutput(importTextArea, "xsdString:\n" + xsdString);
int linkNodesUpdated = 0;
for (ImdiTreeObject currentImdiObject : createdNodes) {
appendToTaskOutput(importTextArea, "linkParent: " + currentImdiObject.getUrlString());
try {
String linkXpath = "/Kinnate/Relation/Link";
Document linksDom = new CmdiComponentBuilder().getDocument(currentImdiObject.getURI());
NodeList relationLinkNodeList = org.apache.xpath.XPathAPI.selectNodeList(linksDom, linkXpath);
for (int nodeCounter = 0; nodeCounter < relationLinkNodeList.getLength(); nodeCounter++) {
Node relationLinkNode = relationLinkNodeList.item(nodeCounter);
if (relationLinkNode != null) {
// todo: update the links
                            // todo: create links in ego and alter but put the type info such as famc only in the relevant entity
String linkValue = createdNodesTable.get(relationLinkNode.getTextContent());
if (linkValue != null) {
relationLinkNode.setTextContent(linkValue);
appendToTaskOutput(importTextArea, "linkValue: " + linkValue);
}
}
}
new CmdiComponentBuilder().savePrettyFormatting(linksDom, currentImdiObject.getFile());
} catch (TransformerException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
}
linkNodesUpdated++;
if (progressBar != null) {
progressBar.setValue((int) ((double) linkNodesUpdated / (double) createdNodes.size() * 100 / 2 + 50));
}
}
appendToTaskOutput(importTextArea, "import finished with a node count of: " + createdNodes.size());
// gedcomImdiObject.saveChangesToCache(true);
// gedcomImdiObject.loadImdiDom();
// gedcomImdiObject.clearChildIcons();
// gedcomImdiObject.clearIcon();
ImdiLoader.getSingleInstance().saveNodesNeedingSave(true);
} catch (IOException exception) {
GuiHelper.linorgBugCatcher.logError(exception);
appendToTaskOutput(importTextArea, "error: " + exception.getMessage());
} catch (ParserConfigurationException parserConfigurationException) {
GuiHelper.linorgBugCatcher.logError(parserConfigurationException);
appendToTaskOutput(importTextArea, "error: " + parserConfigurationException.getMessage());
} catch (DOMException dOMException) {
GuiHelper.linorgBugCatcher.logError(dOMException);
appendToTaskOutput(importTextArea, "error: " + dOMException.getMessage());
} catch (SAXException sAXException) {
GuiHelper.linorgBugCatcher.logError(sAXException);
appendToTaskOutput(importTextArea, "error: " + sAXException.getMessage());
}
// LinorgSessionStorage.getSingleInstance().loadStringArray("KinGraphTree");
String[] createdNodePaths = new String[createdNodes.size()];
int createdNodeCounter = 0;
for (ImdiTreeObject currentImdiObject : createdNodes) {
createdNodePaths[createdNodeCounter] = currentImdiObject.getUrlString();
createdNodeCounter++;
}
LinorgSessionStorage.getSingleInstance().saveStringArray("KinGraphTree", createdNodePaths);
}
}
 | Added additional sample gedcom files and made the import more flexible
 | src/main/java/nl/mpi/kinnate/gedcomimport/GedcomImporter.java | Added additional sample gedcom files and made the import more flexible | <ide><path>rc/main/java/nl/mpi/kinnate/gedcomimport/GedcomImporter.java
<ide> */
<ide> public class GedcomImporter {
<ide>
<del> private int inputLineCounter;
<del> private int currntLineCounter;
<del> private int currentProgressPercent = 0;
<add> private int inputLineCount;
<ide> private String inputFileMd5Sim;
<ide> JProgressBar progressBar = null;
<ide>
<ide> private void appendToTaskOutput(JTextArea importTextArea, String lineOfText) {
<ide> importTextArea.append(lineOfText + "\n");
<ide> importTextArea.setCaretPosition(importTextArea.getText().length());
<del> }
<del>
<del> public int getProgress() {
<del> return currentProgressPercent; // return percent of progress
<ide> }
<ide>
<ide> public void setProgressBar(JProgressBar progressBarLocal) {
<ide> MessageDigest digest = MessageDigest.getInstance("MD5");
<ide> StringBuilder hexString = new StringBuilder();
<ide> String strLine;
<del> inputLineCounter = 0;
<del> currntLineCounter = 0;
<add> inputLineCount = 0;
<ide> while ((strLine = bufferedReader.readLine()) != null) {
<ide> digest.update(strLine.getBytes());
<del> inputLineCounter++;
<add> inputLineCount++;
<ide> }
<ide> byte[] md5sum = digest.digest();
<ide> for (int byteCounter = 0; byteCounter < md5sum.length; ++byteCounter) {
<ide> FileInputStream fstream = new FileInputStream(testFile);
<ide> DataInputStream in = new DataInputStream(fstream);
<ide> calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(in)));
<del> BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
<del> importTestFile(importTextArea, bufferedReader);
<add> importTestFile(importTextArea, new InputStreamReader(in));
<ide> } catch (FileNotFoundException exception) {
<ide> // todo: handle this
<ide> }
<ide>
<ide> public void importTestFile(JTextArea importTextArea, String testFileString) {
<ide> calculateFileNameAndFileLength(new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(testFileString))));
<del> BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(getClass().getResourceAsStream(testFileString)));
<del> importTestFile(importTextArea, bufferedReader);
<add> importTestFile(importTextArea, new InputStreamReader(getClass().getResourceAsStream(testFileString)));
<ide> }
<ide>
<del> public void importTestFile(JTextArea importTextArea, BufferedReader bufferedReader) {
<add> public void importTestFile(JTextArea importTextArea, InputStreamReader inputStreamReader) {
<ide> ArrayList<ImdiTreeObject> createdNodes = new ArrayList<ImdiTreeObject>();
<ide> Hashtable<String, String> createdNodesTable = new Hashtable<String, String>();
<add> BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
<ide> // ArrayList<ImdiTreeObject> linkNodes = new ArrayList<ImdiTreeObject>();
<ide> // really should close the file properly but this is only for testing at this stage
<ide>
<ide> Node currentDomNode = null;
<ide>
<ide> String gedcomPreviousPath = "";
<add> int currntLineCounter = 0;
<ide> while ((strLine = bufferedReader.readLine()) != null) {
<ide> String[] lineParts = strLine.split(" ", 3);
<ide> gedcomLevel = Integer.parseInt(lineParts[0]);
<ide> }
<ide> }
<ide> currntLineCounter++;
<del> currentProgressPercent = (int) ((double) currntLineCounter / (double) inputLineCounter * 100);
<add> int currentProgressPercent = (int) ((double) currntLineCounter / (double) inputLineCount * 100);
<ide> if (progressBar != null) {
<ide> progressBar.setValue(currentProgressPercent / 2);
<ide> } |
|
Java | mit | 0784ab35482a2a93992bbce60065b66cd785d169 | 0 | aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips | // -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.HierarchyEvent;
import java.awt.event.HierarchyListener;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.swing.*;
import javax.swing.plaf.basic.BasicProgressBarUI;
public final class MainPanel extends JPanel implements HierarchyListener {
private transient SwingWorker<String, Void> worker;
private MainPanel() {
super(new BorderLayout());
UIManager.put("ProgressBar.cycleTime", 1000);
UIManager.put("ProgressBar.repaintInterval", 10);
BoundedRangeModel model = new DefaultBoundedRangeModel();
JProgressBar progress1 = new JProgressBar(model);
progress1.setUI(new StripedProgressBarUI(true, true));
JProgressBar progress2 = new JProgressBar(model);
progress2.setUI(new StripedProgressBarUI(true, false));
JProgressBar progress3 = new JProgressBar(model);
progress3.setUI(new StripedProgressBarUI(false, true));
JProgressBar progress4 = new JProgressBar(model);
progress4.setUI(new StripedProgressBarUI(false, false));
List<JProgressBar> list = Arrays.asList(new JProgressBar(model), progress1, progress2, progress3, progress4);
JPanel p = new JPanel(new GridLayout(5, 1));
list.forEach(bar -> p.add(makePanel(bar)));
JButton button = new JButton("Test start");
button.addActionListener(e -> {
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
worker = new BackgroundTask();
list.forEach(bar -> {
bar.setIndeterminate(true);
worker.addPropertyChangeListener(new ProgressListener(bar));
});
worker.execute();
});
Box box = Box.createHorizontalBox();
box.add(Box.createHorizontalGlue());
box.add(button);
box.add(Box.createHorizontalStrut(5));
addHierarchyListener(this);
add(p);
add(box, BorderLayout.SOUTH);
setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
setPreferredSize(new Dimension(320, 240));
}
@Override public void hierarchyChanged(HierarchyEvent e) {
boolean isDisplayableChanged = (e.getChangeFlags() & HierarchyEvent.DISPLAYABILITY_CHANGED) != 0;
if (isDisplayableChanged && !e.getComponent().isDisplayable() && Objects.nonNull(worker)) {
System.out.println("DISPOSE_ON_CLOSE");
worker.cancel(true);
worker = null;
}
}
private static Component makePanel(Component cmp) {
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 5, 5, 5);
c.weightx = 1d;
JPanel p = new JPanel(new GridBagLayout());
p.add(cmp, c);
return p;
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
private static void createAndShowGui() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
ex.printStackTrace();
Toolkit.getDefaultToolkit().beep();
}
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
class StripedProgressBarUI extends BasicProgressBarUI {
private final boolean dir;
private final boolean slope;
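  // Note (comment added for clarity; not in the original source): judging from paintIndeterminate
  // below, "dir" appears to flip the slant direction of each stripe parallelogram, while "slope"
  // appears to reverse the horizontal scroll direction of the indeterminate animation.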
protected StripedProgressBarUI(boolean dir, boolean slope) {
super();
this.dir = dir;
this.slope = slope;
}
@Override protected int getBoxLength(int availableLength, int otherDimension) {
return availableLength; // (int) Math.round(availableLength / 6d);
}
@Override public void paintIndeterminate(Graphics g, JComponent c) {
// if (!(g instanceof Graphics2D)) {
// return;
// }
Insets b = progressBar.getInsets(); // area for border
int barRectWidth = progressBar.getWidth() - b.right - b.left;
int barRectHeight = progressBar.getHeight() - b.top - b.bottom;
if (barRectWidth <= 0 || barRectHeight <= 0) {
return;
}
// Paint the striped box.
boxRect = getBox(boxRect);
if (Objects.nonNull(boxRect)) {
int w = 10;
int x = getAnimationIndex();
GeneralPath p = new GeneralPath();
if (dir) {
p.moveTo(boxRect.x, boxRect.y);
p.lineTo(boxRect.x + w * .5f, boxRect.getMaxY());
p.lineTo(boxRect.x + (float) w, boxRect.getMaxY());
} else {
p.moveTo(boxRect.x, boxRect.getMaxY());
p.lineTo(boxRect.x + w * .5f, boxRect.getMaxY());
p.lineTo(boxRect.x + (float) w, boxRect.y);
}
p.lineTo(boxRect.x + w * .5f, boxRect.y);
p.closePath();
Graphics2D g2 = (Graphics2D) g.create();
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2.setPaint(progressBar.getForeground());
if (slope) {
for (int i = boxRect.width + x; i > -w; i -= w) {
g2.fill(AffineTransform.getTranslateInstance(i, 0).createTransformedShape(p));
}
} else {
for (int i = -x; i < boxRect.width; i += w) {
g2.fill(AffineTransform.getTranslateInstance(i, 0).createTransformedShape(p));
}
}
g2.dispose();
}
}
}
class BackgroundTask extends SwingWorker<String, Void> {
@Override public String doInBackground() throws InterruptedException {
Thread.sleep(5000); // dummy task 1
int current = 0;
int lengthOfTask = 100;
while (current <= lengthOfTask && !isCancelled()) {
Thread.sleep(50); // dummy task 2
setProgress(100 * current / lengthOfTask);
current++;
}
return "Done";
}
}
class ProgressListener implements PropertyChangeListener {
private final JProgressBar progressBar;
protected ProgressListener(JProgressBar progressBar) {
this.progressBar = progressBar;
this.progressBar.setValue(0);
}
@Override public void propertyChange(PropertyChangeEvent e) {
String strPropertyName = e.getPropertyName();
if ("progress".equals(strPropertyName)) {
progressBar.setIndeterminate(false);
int progress = (Integer) e.getNewValue();
progressBar.setValue(progress);
}
}
}
| StripedProgressBar/src/java/example/MainPanel.java | // -*- mode:java; encoding:utf-8 -*-
// vim:set fileencoding=utf-8:
// @homepage@
package example;
import java.awt.*;
import java.awt.event.HierarchyEvent;
import java.awt.event.HierarchyListener;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.swing.*;
import javax.swing.plaf.basic.BasicProgressBarUI;
public final class MainPanel extends JPanel implements HierarchyListener {
private transient SwingWorker<String, Void> worker;
private MainPanel() {
super(new BorderLayout());
UIManager.put("ProgressBar.cycleTime", 1000);
UIManager.put("ProgressBar.repaintInterval", 10);
BoundedRangeModel model = new DefaultBoundedRangeModel();
JProgressBar progress1 = new JProgressBar(model);
progress1.setUI(new StripedProgressBarUI(true, true));
JProgressBar progress2 = new JProgressBar(model);
progress2.setUI(new StripedProgressBarUI(true, false));
JProgressBar progress3 = new JProgressBar(model);
progress3.setUI(new StripedProgressBarUI(false, true));
JProgressBar progress4 = new JProgressBar(model);
progress4.setUI(new StripedProgressBarUI(false, false));
List<JProgressBar> list = Arrays.asList(new JProgressBar(model), progress1, progress2, progress3, progress4);
JPanel p = new JPanel(new GridLayout(5, 1));
list.forEach(bar -> p.add(makePanel(bar)));
JButton button = new JButton("Test start");
button.addActionListener(e -> {
if (Objects.nonNull(worker) && !worker.isDone()) {
worker.cancel(true);
}
worker = new BackgroundTask();
list.forEach(bar -> {
bar.setIndeterminate(true);
worker.addPropertyChangeListener(new ProgressListener(bar));
});
worker.execute();
});
Box box = Box.createHorizontalBox();
box.add(Box.createHorizontalGlue());
box.add(button);
box.add(Box.createHorizontalStrut(5));
addHierarchyListener(this);
add(p);
add(box, BorderLayout.SOUTH);
setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
setPreferredSize(new Dimension(320, 240));
}
@Override public void hierarchyChanged(HierarchyEvent e) {
boolean isDisplayableChanged = (e.getChangeFlags() & HierarchyEvent.DISPLAYABILITY_CHANGED) != 0;
if (isDisplayableChanged && !e.getComponent().isDisplayable() && Objects.nonNull(worker)) {
System.out.println("DISPOSE_ON_CLOSE");
worker.cancel(true);
worker = null;
}
}
private static Component makePanel(Component cmp) {
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 5, 5, 5);
c.weightx = 1d;
JPanel p = new JPanel(new GridBagLayout());
p.add(cmp, c);
return p;
}
public static void main(String[] args) {
EventQueue.invokeLater(MainPanel::createAndShowGui);
}
private static void createAndShowGui() {
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
ex.printStackTrace();
Toolkit.getDefaultToolkit().beep();
}
JFrame frame = new JFrame("@title@");
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.getContentPane().add(new MainPanel());
frame.pack();
frame.setLocationRelativeTo(null);
frame.setVisible(true);
}
}
class StripedProgressBarUI extends BasicProgressBarUI {
private final boolean dir;
private final boolean slope;
protected StripedProgressBarUI(boolean dir, boolean slope) {
super();
this.dir = dir;
this.slope = slope;
}
@Override protected int getBoxLength(int availableLength, int otherDimension) {
return availableLength; // (int) Math.round(availableLength / 6d);
}
@Override public void paintIndeterminate(Graphics g, JComponent c) {
// if (!(g instanceof Graphics2D)) {
// return;
// }
Insets b = progressBar.getInsets(); // area for border
int barRectWidth = progressBar.getWidth() - b.right - b.left;
int barRectHeight = progressBar.getHeight() - b.top - b.bottom;
if (barRectWidth <= 0 || barRectHeight <= 0) {
return;
}
// Paint the striped box.
boxRect = getBox(boxRect);
if (Objects.nonNull(boxRect)) {
int w = 10;
int x = getAnimationIndex();
GeneralPath p = new GeneralPath();
if (dir) {
p.moveTo(boxRect.x, boxRect.y);
p.lineTo(boxRect.x + w * .5f, boxRect.y + boxRect.height);
p.lineTo(boxRect.x + w, boxRect.y + boxRect.height);
p.lineTo(boxRect.x + w * .5f, boxRect.y);
} else {
p.moveTo(boxRect.x, boxRect.y + boxRect.height);
p.lineTo(boxRect.x + w * .5f, boxRect.y + boxRect.height);
p.lineTo(boxRect.x + w, boxRect.y);
p.lineTo(boxRect.x + w * .5f, boxRect.y);
}
p.closePath();
Graphics2D g2 = (Graphics2D) g.create();
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2.setPaint(progressBar.getForeground());
if (slope) {
for (int i = boxRect.width + x; i > -w; i -= w) {
g2.fill(AffineTransform.getTranslateInstance(i, 0).createTransformedShape(p));
}
} else {
for (int i = -x; i < boxRect.width; i += w) {
g2.fill(AffineTransform.getTranslateInstance(i, 0).createTransformedShape(p));
}
}
g2.dispose();
}
}
}
class BackgroundTask extends SwingWorker<String, Void> {
@Override public String doInBackground() {
try { // dummy task
Thread.sleep(5000);
} catch (InterruptedException ex) {
return "Interrupted";
}
int current = 0;
int lengthOfTask = 100;
while (current <= lengthOfTask && !isCancelled()) {
try { // dummy task
Thread.sleep(50);
} catch (InterruptedException ex) {
return "Interrupted";
}
setProgress(100 * current / lengthOfTask);
current++;
}
return "Done";
}
}
class ProgressListener implements PropertyChangeListener {
private final JProgressBar progressBar;
protected ProgressListener(JProgressBar progressBar) {
this.progressBar = progressBar;
this.progressBar.setValue(0);
}
@Override public void propertyChange(PropertyChangeEvent e) {
String strPropertyName = e.getPropertyName();
if ("progress".equals(strPropertyName)) {
progressBar.setIndeterminate(false);
int progress = (Integer) e.getNewValue();
progressBar.setValue(progress);
}
}
}
| refactor: use Rectangle#getMaxY() instead of r.y + r.height
| StripedProgressBar/src/java/example/MainPanel.java | refactor: use Rectangle#getMaxY() instead of r.y + r.height | <ide><path>tripedProgressBar/src/java/example/MainPanel.java
<ide> GeneralPath p = new GeneralPath();
<ide> if (dir) {
<ide> p.moveTo(boxRect.x, boxRect.y);
<del> p.lineTo(boxRect.x + w * .5f, boxRect.y + boxRect.height);
<del> p.lineTo(boxRect.x + w, boxRect.y + boxRect.height);
<del> p.lineTo(boxRect.x + w * .5f, boxRect.y);
<add> p.lineTo(boxRect.x + w * .5f, boxRect.getMaxY());
<add> p.lineTo(boxRect.x + (float) w, boxRect.getMaxY());
<ide> } else {
<del> p.moveTo(boxRect.x, boxRect.y + boxRect.height);
<del> p.lineTo(boxRect.x + w * .5f, boxRect.y + boxRect.height);
<del> p.lineTo(boxRect.x + w, boxRect.y);
<del> p.lineTo(boxRect.x + w * .5f, boxRect.y);
<add> p.moveTo(boxRect.x, boxRect.getMaxY());
<add> p.lineTo(boxRect.x + w * .5f, boxRect.getMaxY());
<add> p.lineTo(boxRect.x + (float) w, boxRect.y);
<ide> }
<add> p.lineTo(boxRect.x + w * .5f, boxRect.y);
<ide> p.closePath();
<ide>
<ide> Graphics2D g2 = (Graphics2D) g.create();
<ide> }
<ide>
<ide> class BackgroundTask extends SwingWorker<String, Void> {
<del> @Override public String doInBackground() {
<del> try { // dummy task
<del> Thread.sleep(5000);
<del> } catch (InterruptedException ex) {
<del> return "Interrupted";
<del> }
<add> @Override public String doInBackground() throws InterruptedException {
<add> Thread.sleep(5000); // dummy task 1
<ide> int current = 0;
<ide> int lengthOfTask = 100;
<ide> while (current <= lengthOfTask && !isCancelled()) {
<del> try { // dummy task
<del> Thread.sleep(50);
<del> } catch (InterruptedException ex) {
<del> return "Interrupted";
<del> }
<add> Thread.sleep(50); // dummy task 2
<ide> setProgress(100 * current / lengthOfTask);
<ide> current++;
<ide> } |
|
Java | apache-2.0 | d6c5335e12d7fb4b567696f2db1785a7fa8c0fe6 | 0 | opentable/otj-jaxrs,sannessa/otj-jaxrs | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opentable.jaxrs;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.ClientErrorException;
import javax.ws.rs.ServerErrorException;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Objects;
import com.google.common.base.Throwables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opentable.callback.Callback;
import com.opentable.callback.CallbackRefusedException;
@Singleton
public class StreamedJsonResponseConverter
{
private static final Logger LOG = LoggerFactory.getLogger(StreamedJsonResponseConverter.class);
private final ObjectMapper mapper;
@Inject
StreamedJsonResponseConverter(ObjectMapper mapper)
{
this.mapper = mapper;
}
public <T> void read(Response response,
Callback<T> callback,
TypeReference<T> type)
throws IOException
{
try {
final int sc = response.getStatus();
switch(sc)
{
case 201:
case 204:
LOG.debug("Return code is {}, finishing.", response.getStatus());
return;
case 200:
try (final JsonParser jp = mapper.getFactory().createParser(response.readEntity(InputStream.class))) {
doRead(callback, type, jp);
}
return;
default:
if (sc >= 400 && sc < 500) {
throw new ClientErrorException(response);
}
throw new ServerErrorException(response);
}
} finally {
response.close();
}
}
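    // Illustrative note (added comment, not part of the original source): doRead below expects a
    // streamed JSON envelope shaped roughly like
    //   { "results": [ {...}, {...} ], "success": true }
    // Each element of "results" is deserialized to T and passed to the callback; if the stream does
    // not end with "success": true it is treated as an abnormal termination.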
private <T> void doRead(
Callback<T> callback,
TypeReference<T> type,
final JsonParser jp)
throws IOException
{
expect(jp, jp.nextToken(), JsonToken.START_OBJECT);
expect(jp, jp.nextToken(), JsonToken.FIELD_NAME);
if (!"results".equals(jp.getCurrentName())) {
throw new JsonParseException(jp, "expecting results field");
}
expect(jp, jp.nextToken(), JsonToken.START_ARRAY);
// As noted in a well-hidden comment in the MappingIterator constructor,
// readValuesAs requires the parser to be positioned after the START_ARRAY
// token with an empty current token
jp.clearCurrentToken();
Iterator<T> iter = jp.readValuesAs(type);
while (iter.hasNext()) {
try {
callback.call(iter.next());
}
catch (CallbackRefusedException e) {
LOG.debug("callback refused execution, finishing.", e);
return;
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("Callback interrupted", e);
}
catch (Exception e) {
Throwables.propagateIfPossible(e, IOException.class);
throw new IOException("Callback failure", e);
}
}
if (jp.nextValue() != JsonToken.VALUE_TRUE || !jp.getCurrentName().equals("success")) {
throw new IOException("Streamed receive did not terminate normally; inspect server logs for cause.");
}
}
private void expect(final JsonParser jp, final JsonToken token, final JsonToken expected) throws JsonParseException
{
if (!Objects.equal(token, expected)) {
throw new JsonParseException(jp, String.format("Expected %s, found %s", expected, token));
}
}
}
| shared/src/main/java/com/opentable/jaxrs/StreamedJsonResponseConverter.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opentable.jaxrs;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.ClientErrorException;
import javax.ws.rs.ServerErrorException;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Objects;
import com.google.common.base.Throwables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opentable.callback.Callback;
import com.opentable.callback.CallbackRefusedException;
@Singleton
public class StreamedJsonResponseConverter
{
private static final Logger LOG = LoggerFactory.getLogger(StreamedJsonResponseConverter.class);
private final ObjectMapper mapper;
@Inject
StreamedJsonResponseConverter(ObjectMapper mapper)
{
this.mapper = mapper;
}
public <T> void read(Response response,
Callback<T> callback,
TypeReference<T> type)
throws IOException
{
try {
final int sc = response.getStatus();
switch(sc)
{
case 201:
case 204:
LOG.debug("Return code is {}, finishing.", response.getStatus());
return;
case 200:
try (final JsonParser jp = mapper.getFactory().createParser(response.readEntity(InputStream.class))) {
doRead(callback, type, jp);
}
return;
default:
if (sc >= 400 && sc < 500) {
throw new ClientErrorException(response);
}
throw new ServerErrorException(response);
}
} finally {
response.close();
}
}
private <T> void doRead(
Callback<T> callback,
TypeReference<T> type,
final JsonParser jp)
throws IOException
{
expect(jp, jp.nextToken(), JsonToken.START_OBJECT);
expect(jp, jp.nextToken(), JsonToken.FIELD_NAME);
if (!"results".equals(jp.getCurrentName())) {
throw new JsonParseException("expecting results field", jp.getCurrentLocation());
}
expect(jp, jp.nextToken(), JsonToken.START_ARRAY);
// As noted in a well-hidden comment in the MappingIterator constructor,
// readValuesAs requires the parser to be positioned after the START_ARRAY
// token with an empty current token
jp.clearCurrentToken();
Iterator<T> iter = jp.readValuesAs(type);
while (iter.hasNext()) {
try {
callback.call(iter.next());
}
catch (CallbackRefusedException e) {
LOG.debug("callback refused execution, finishing.", e);
return;
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new IOException("Callback interrupted", e);
}
catch (Exception e) {
Throwables.propagateIfPossible(e, IOException.class);
throw new IOException("Callback failure", e);
}
}
if (jp.nextValue() != JsonToken.VALUE_TRUE || !jp.getCurrentName().equals("success")) {
throw new IOException("Streamed receive did not terminate normally; inspect server logs for cause.");
}
}
private void expect(final JsonParser jp, final JsonToken token, final JsonToken expected) throws JsonParseException
{
if (!Objects.equal(token, expected)) {
throw new JsonParseException(String.format("Expected %s, found %s", expected, token), jp.getCurrentLocation());
}
}
}
| Fix Jackson deprecation warnings
| shared/src/main/java/com/opentable/jaxrs/StreamedJsonResponseConverter.java | Fix Jackson deprecation warnings | <ide><path>hared/src/main/java/com/opentable/jaxrs/StreamedJsonResponseConverter.java
<ide> expect(jp, jp.nextToken(), JsonToken.START_OBJECT);
<ide> expect(jp, jp.nextToken(), JsonToken.FIELD_NAME);
<ide> if (!"results".equals(jp.getCurrentName())) {
<del> throw new JsonParseException("expecting results field", jp.getCurrentLocation());
<add> throw new JsonParseException(jp, "expecting results field");
<ide> }
<ide> expect(jp, jp.nextToken(), JsonToken.START_ARRAY);
<ide> // As noted in a well-hidden comment in the MappingIterator constructor,
<ide> private void expect(final JsonParser jp, final JsonToken token, final JsonToken expected) throws JsonParseException
<ide> {
<ide> if (!Objects.equal(token, expected)) {
<del> throw new JsonParseException(String.format("Expected %s, found %s", expected, token), jp.getCurrentLocation());
<add> throw new JsonParseException(jp, String.format("Expected %s, found %s", expected, token));
<ide> }
<ide> }
<ide> } |
|
Java | mit | ca310d16574e61524e35605bae61717741324ff8 | 0 | servicosgoval/editor-de-servicos,servicosgoval/editor-de-servicos,servicosgovbr/editor-de-servicos,servicosgovbr/editor-de-servicos,servicosgovbr/editor-de-servicos,servicosgoval/editor-de-servicos,servicosgovbr/editor-de-servicos,servicosgoval/editor-de-servicos | package br.gov.servicos.editor.xml;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import lombok.experimental.FieldDefaults;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import static java.lang.Boolean.parseBoolean;
import static java.nio.charset.Charset.defaultCharset;
import static java.util.Collections.emptyList;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toList;
import static lombok.AccessLevel.PRIVATE;
@FieldDefaults(level = PRIVATE, makeFinal = true)
public class ArquivoXml {
Optional<Element> xml;
public ArquivoXml(String caminhoXml, String root) throws IOException {
this(Jsoup.parse(new File(caminhoXml), defaultCharset().name()).select(root).first());
}
private ArquivoXml(Element xml) {
this.xml = ofNullable(xml);
}
public String atributo(String atributo) {
return xml.map(x -> x.attr(atributo).trim()).orElse("");
}
public String atributo(String seletor, String atributo) {
return navega(seletor).atributo(atributo);
}
public Boolean atrituboAtivo(String atributo) {
return paraBooleano(atributo(atributo));
}
public Boolean textoAtivo(String seletor) {
return paraBooleano(navega(seletor).texto());
}
public String texto() {
return xml.map(x -> x.text().trim()).orElse("");
}
public String texto(String seletor) {
return navega(seletor).texto();
}
public List<String> coleta(String seletor) {
return coleta(seletor, ArquivoXml::texto);
}
public <T> List<T> coleta(String seletor, Function<ArquivoXml, T> conversor) {
return xml.map(
x -> x.select(seletor)
.stream()
.map(e -> new ArquivoXml(e).converte(conversor))
.collect(toList())
).orElse(emptyList());
}
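    // Illustrative usage (added comment, not part of the original source; the selector and field
    // names below are hypothetical):
    //   List<String> nomes = arquivo.coleta("servico", s -> s.texto("nome"));
    // collects every element matched by the selector and maps each one through the converter.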
public <T> T converte(Function<ArquivoXml, T> conversor) {
return conversor.apply(this);
}
public <T> T converte(String seletor, Function<ArquivoXml, T> conversor) {
return navega(seletor).converte(conversor);
}
private ArquivoXml navega(String seletor) {
return new ArquivoXml(
xml.map(x -> x.select(seletor).first()).orElse(null)
);
}
@SuppressFBWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "Campos não preenchidos não devem ser false")
private Boolean paraBooleano(String valor) {
if (valor.equals("true") || valor.equals("false"))
return parseBoolean(valor);
return null;
}
}
| src/main/java/br/gov/servicos/editor/xml/ArquivoXml.java | package br.gov.servicos.editor.xml;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import lombok.experimental.FieldDefaults;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import static java.lang.Boolean.parseBoolean;
import static java.util.Collections.emptyList;
import static java.util.Optional.ofNullable;
import static java.util.stream.Collectors.toList;
import static lombok.AccessLevel.PRIVATE;
@FieldDefaults(level = PRIVATE, makeFinal = true)
public class ArquivoXml {
private static final String UTF_8 = "UTF-8";
Optional<Element> xml;
public ArquivoXml(String caminhoXml, String root) throws IOException {
this(Jsoup.parse(new File(caminhoXml), UTF_8).select(root).first());
}
private ArquivoXml(Element xml) {
this.xml = ofNullable(xml);
}
public String atributo(String atributo) {
return xml.map(x -> x.attr(atributo).trim()).orElse("");
}
public String atributo(String seletor, String atributo) {
return navega(seletor).atributo(atributo);
}
public Boolean atrituboAtivo(String atributo) {
return paraBooleano(atributo(atributo));
}
public Boolean textoAtivo(String seletor) {
return paraBooleano(navega(seletor).texto());
}
public String texto() {
return xml.map(x -> x.text().trim()).orElse("");
}
public String texto(String seletor) {
return navega(seletor).texto();
}
public List<String> coleta(String seletor) {
return coleta(seletor, ArquivoXml::texto);
}
public <T> List<T> coleta(String seletor, Function<ArquivoXml, T> conversor) {
return xml.map(
x -> x.select(seletor)
.stream()
.map(e -> new ArquivoXml(e).converte(conversor))
.collect(toList())
).orElse(emptyList());
}
public <T> T converte(Function<ArquivoXml, T> conversor) {
return conversor.apply(this);
}
public <T> T converte(String seletor, Function<ArquivoXml, T> conversor) {
return navega(seletor).converte(conversor);
}
private ArquivoXml navega(String seletor) {
return new ArquivoXml(
xml.map(x -> x.select(seletor).first()).orElse(null)
);
}
@SuppressFBWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "Campos não preenchidos não devem ser false")
private Boolean paraBooleano(String valor) {
if (valor.equals("true") || valor.equals("false"))
return parseBoolean(valor);
return null;
}
}
| Usa encoding default
| src/main/java/br/gov/servicos/editor/xml/ArquivoXml.java | Usa encoding default | <ide><path>rc/main/java/br/gov/servicos/editor/xml/ArquivoXml.java
<ide> import java.util.function.Function;
<ide>
<ide> import static java.lang.Boolean.parseBoolean;
<add>import static java.nio.charset.Charset.defaultCharset;
<ide> import static java.util.Collections.emptyList;
<ide> import static java.util.Optional.ofNullable;
<ide> import static java.util.stream.Collectors.toList;
<ide> @FieldDefaults(level = PRIVATE, makeFinal = true)
<ide> public class ArquivoXml {
<ide>
<del> private static final String UTF_8 = "UTF-8";
<del>
<ide> Optional<Element> xml;
<ide>
<ide> public ArquivoXml(String caminhoXml, String root) throws IOException {
<del> this(Jsoup.parse(new File(caminhoXml), UTF_8).select(root).first());
<add> this(Jsoup.parse(new File(caminhoXml), defaultCharset().name()).select(root).first());
<ide> }
<ide>
<ide> private ArquivoXml(Element xml) { |
|
JavaScript | agpl-3.0 | 41df5dc1b692571b7b55b50f88beb3fdf7b6e0d3 | 0 | vladm3/mwEmbed,kaltura/mwEmbed,joanpuigsanz/mwEmbed,kaltura/mwEmbed,SVSG/mwEmbed,kaltura/mwEmbed,FlixMaster/mwEmbed,bordar/mwEmbed,DBezemer/mwEmbed,bordar/mwEmbed,omridevk/mwEmbed,joanpuigsanz/mwEmbed,panda-os/mwEmbed,alexmilk/mwEmbed,omridevk/mwEmbed,abaylis/mwEmbed,SVSG/mwEmbed,alexmilk/mwEmbed,joanpuigsanz/mwEmbed,DBezemer/mwEmbed,omridevk/mwEmbed,slash851/mwEmbed,omridevk/mwEmbed,SVSG/mwEmbed,panda-os/mwEmbed,tanyaLibatter/mwEmbed,tanyaLibatter/mwEmbed,kaltura/mwEmbed,bordar/mwEmbed,FlixMaster/mwEmbed,DBezemer/mwEmbed,FlixMaster/mwEmbed,vladm3/mwEmbed,matsuu/mwEmbed,abaylis/mwEmbed,tanyaLibatter/mwEmbed,panda-os/mwEmbed,vladm3/mwEmbed,slash851/mwEmbed,abaylis/mwEmbed,matsuu/mwEmbed,matsuu/mwEmbed,joanpuigsanz/mwEmbed,safarijv/mwEmbed,vladm3/mwEmbed,safarijv/mwEmbed,alexmilk/mwEmbed,DBezemer/mwEmbed,FlixMaster/mwEmbed,safarijv/mwEmbed,abaylis/mwEmbed,tanyaLibatter/mwEmbed,matsuu/mwEmbed,safarijv/mwEmbed,alexmilk/mwEmbed,bordar/mwEmbed,SVSG/mwEmbed,slash851/mwEmbed,panda-os/mwEmbed,slash851/mwEmbed | /**
* A media element corresponding to a <video> element.
*
* It is implemented as a collection of mediaSource objects. The media sources
* will be initialized from the <video> element, its child <source> elements,
* and/or the ROE file referenced by the <video> element.
*
* @param {element}
* videoElement <video> element used for initialization.
* @constructor
*/
( function( mw, $ ) {
mw.MediaElement = function( element ) {
this.init( element );
};
mw.MediaElement.prototype = {
// The array of mediaSource elements.
sources: null,
// flag for ROE data being added.
addedROEData: false,
// Selected mediaSource element.
selectedSource: null,
/**
* Media Element constructor
*
	 * Sets up a mediaElement from a provided top level "video" element and adds
	 * any child sources that are found
*
* @param {Element}
* videoElement Element that has src attribute or has children
* source elements
*/
init: function( videoElement ) {
var _this = this;
mw.log( "EmbedPlayer::mediaElement:init:" + videoElement.id );
this.parentEmbedId = videoElement.id;
this.sources = new Array();
// Process the videoElement as a source element:
if( videoElement ){
if ( $( videoElement ).attr( "src" ) ) {
_this.tryAddSource( videoElement );
}
// Process elements source children
$( videoElement ).find( 'source,track' ).each( function( ) {
_this.tryAddSource( this );
} );
}
},
/**
* Updates the time request for all sources that have a standard time
* request argument (ie &t=start_time/end_time)
*
* @param {String}
* startNpt Start time in npt format
* @param {String}
* endNpt End time in npt format
*/
updateSourceTimes: function( startNpt, endNpt ) {
var _this = this;
$.each( this.sources, function( inx, mediaSource ) {
mediaSource.updateSrcTime( startNpt, endNpt );
} );
},
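	// Illustrative usage (added comment, not part of the original source; assumes an embed player
	// instance named `embedPlayer`):
	//   embedPlayer.mediaElement.updateSourceTimes( '0:00:05', '0:01:30' );
	// rewrites the &t=start/end request argument on every source to the given npt range.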
/**
* Get Text tracks
*/
getTextTracks: function(){
var textTracks = [];
$.each( this.sources, function(inx, source ){
if ( source.nodeName == 'track' || ( source.mimeType && source.mimeType.indexOf('text/') !== -1 )){
textTracks.push( source );
}
});
return textTracks;
},
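	// Note (added comment, not part of the original source): text tracks are the subset of sources
	// that came from <track> elements or whose mime type starts with "text/", e.g. subtitle or
	// caption sources.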
/**
* Returns the array of mediaSources of this element.
*
* @param {String}
* [mimeFilter] Filter criteria for set of mediaSources to return
* @return {Array} mediaSource elements.
*/
getSources: function( mimeFilter ) {
if ( !mimeFilter ) {
return this.sources;
}
// Apply mime filter:
var source_set = new Array();
for ( var i = 0; i < this.sources.length ; i++ ) {
if ( this.sources[i].mimeType &&
this.sources[i].mimeType.indexOf( mimeFilter ) != -1 )
{
source_set.push( this.sources[i] );
}
}
return source_set;
},
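	// Illustrative usage (added comment, not part of the original source):
	//   mediaElement.getSources( 'video/ogg' ) returns only sources whose mime type contains the
	//   given filter string, while mediaElement.getSources() returns the full source array.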
/**
* Selects a source by id
*
* @param {String}
* sourceId Id of the source to select.
* @return {MediaSource} The selected mediaSource or null if not found
*/
getSourceById:function( sourceId ) {
for ( var i = 0; i < this.sources.length ; i++ ) {
if ( this.sources[i].id == sourceId ) {
return this.sources[i];
}
}
return null;
},
/**
* Selects a particular source for playback updating the "selectedSource"
*
* @param {Number}
* index Index of source element to set as selectedSource
*/
setSourceByIndex: function( index ) {
mw.log( 'EmbedPlayer::mediaElement:selectSource: ' + index );
var oldSrc = this.selectedSource.getSrc();
var playableSources = this.getPlayableSources();
for ( var i = 0; i < playableSources.length; i++ ) {
if ( i == index ) {
this.selectedSource = playableSources[i];
break;
}
}
if( oldSrc != this.selectedSource.getSrc() ){
$( '#' + this.parentEmbedId ).trigger( 'SourceChange');
}
},
/**
* Sets a the selected source to passed in source object
* @param {Object} Source
*/
setSource: function( source ){
var oldSrc = this.selectedSource.getSrc();
this.selectedSource = source;
if( oldSrc != this.selectedSource.getSrc() ){
$( '#' + this.parentEmbedId ).trigger( 'SourceChange');
}
},
/**
* Selects the default source via cookie preference, default marked, or by
* id order
*/
autoSelectSource: function() {
mw.log( 'EmbedPlayer::mediaElement::autoSelectSource' );
var _this = this;
// Select the default source
var playableSources = this.getPlayableSources();
var flash_flag = ogg_flag = false;
// Check if there are any playableSources
if( playableSources.length == 0 ){
return false;
}
var setSelectedSource = function( source ){
_this.selectedSource = source;
return _this.selectedSource;
};
// Set via user-preference
$.each( playableSources, function( inx, source ){
var mimeType =source.mimeType;
if ( mw.EmbedTypes.getMediaPlayers().preference[ 'format_preference' ] == mimeType ) {
mw.log( 'MediaElement::autoSelectSource: Set via format_preference: ' + source.mimeType );
return setSelectedSource( source );
}
});
// Set via module driven preference:
$( this ).trigger( 'AutoSelectSource', playableSources );
if( _this.selectedSource ){
mw.log('MediaElement::autoSelectSource: Set via trigger::' + _this.selectedSource.getTitle() );
return _this.selectedSource;
}
// Set via marked default:
$.each( playableSources, function( inx, source ){
if ( source.markedDefault ) {
mw.log( 'MediaElement::autoSelectSource: Set via marked default: ' + source.markedDefault );
return setSelectedSource( source );;
}
});
//Set via user bandwith pref will always set source to closest bandwith allocation while not going over EmbedPlayer.UserBandwidth
if( $.cookie('EmbedPlayer.UserBandwidth') ){
var currentMaxBadwith = 0;
$.each( playableSources, function(inx, source ){
if( source.bandwidth ){
if( source.bandwidth > currentMaxBadwith && source.bandwidth <= $.cookie('EmbedPlayer.UserBandwidth') ){
currentMaxBadwith = source.bandwidth;
setSelectedSource( source );
}
}
});
}
if ( this.selectedSource ) {
mw.log('MediaElement::autoSelectSource: Set via bandwidth prefrence: source ' + source.bandwidth + ' user: ' + $.cookie('EmbedPlayer.UserBandwidth') );
return this.selectedSource;
}
// Set via embed resolution closest to relative to display size
var minSizeDelta = null;
if( this.parentEmbedId ){
var displayWidth = $('#' + this.parentEmbedId).width();
$.each( playableSources, function(inx, source ){
if( source.width && displayWidth ){
var sizeDelta = Math.abs( source.width - displayWidth );
mw.log('MediaElement::autoSelectSource: size delta : ' + sizeDelta + ' for s:' + source.width );
if( minSizeDelta == null || sizeDelta < minSizeDelta){
minSizeDelta = sizeDelta;
setSelectedSource( source );
}
}
});
}
// If we found a source via display resolution return true
if ( this.selectedSource ) {
mw.log('MediaElement::autoSelectSource: Set via embed resolution:' + this.selectedSource.width + ' close to: ' + displayWidth );
return this.selectedSource;
}
// Prefer native playback ( and prefer WebM over ogg and h.264 )
var namedSources = {};
$.each( playableSources, function(inx, source ){
var mimeType = source.mimeType;
var player = mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType );
if ( player && player.library == 'Native' ) {
switch( player.id ){
case 'mp3Native':
namedSources['mp3'] = source;
break;
case 'oggNative':
namedSources['ogg'] = source;
break;
case 'webmNative':
namedSources['webm'] = source;
break;
case 'h264Native':
namedSources['h264'] = source;
break;
}
}
});
var codecPref = mw.getConfig( 'EmbedPlayer.CodecPreference');
for(var i =0; i < codecPref.length; i++){
var codec = codecPref[ i ];
if( namedSources[ codec ]){
mw.log('MediaElement::autoSelectSource: set via EmbedPlayer.CodecPreference: ' + namedSources[ codec ].getTitle() );
return setSelectedSource( namedSources[ codec ] );
}
};
// Set h264 via native or flash fallback
$.each( playableSources, function(inx, source ){
var mimeType = source.mimeType;
var player = mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType );
if ( mimeType == 'video/h264'
&& player
&& (
player.library == 'Native'
||
player.library == 'Kplayer'
)
) {
if( source ){
mw.log('MediaElement::autoSelectSource: Set h264 via native or flash fallback:' + source.getTitle() );
return setSelectedSource( source );
}
}
});
// Else just select the first playable source
if ( !this.selectedSource && playableSources[0] ) {
mw.log( 'MediaElement::autoSelectSource: Set via first source: ' + playableSources[0].getTitle() );
return setSelectedSource( playableSources[0] );
}
// No Source found so no source selected
return false;
},
/**
* check if the mime is ogg
*/
isOgg: function( mimeType ){
if ( mimeType == 'video/ogg'
|| mimeType == 'ogg/video'
|| mimeType == 'video/annodex'
|| mimeType == 'application/ogg'
) {
return true;
}
return false;
},
/**
* Returns the thumbnail URL for the media element.
*
* @returns {String} thumbnail URL
*/
getPosterSrc: function( ) {
return this.poster;
},
/**
* Checks whether there is a stream of a specified MIME type.
*
* @param {String}
* mimeType MIME type to check.
* @return {Boolean} true if sources include MIME false if not.
*/
hasStreamOfMIMEType: function( mimeType )
{
for ( var i = 0; i < this.sources.length; i++ )
{
if ( this.sources[i].getMIMEType() == mimeType ){
return true;
}
}
return false;
},
/**
* Checks if media is a playable type
*/
isPlayableType: function( mimeType ) {
// mw.log("isPlayableType:: " + mimeType);
if ( mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType ) ) {
return true;
} else {
return false;
}
},
/**
* Adds a single mediaSource using the provided element if the element has a
* 'src' attribute.
*
* @param {Element}
* element <video>, <source> or <mediaSource> <text> element.
*/
tryAddSource: function( element ) {
//mw.log( 'mw.MediaElement::tryAddSource:' + $( element ).attr( "src" ) );
var newSrc = $( element ).attr( 'src' );
if ( newSrc ) {
// Make sure an existing element with the same src does not already exist:
for ( var i = 0; i < this.sources.length; i++ ) {
if ( this.sources[i].src == newSrc ) {
// Source already exists update any new attr:
this.sources[i].updateSource( element );
return this.sources[i];
}
}
}
// Create a new source
var source = new mw.MediaSource( element );
this.sources.push( source );
//mw.log( 'tryAddSource: added source ::' + source + 'sl:' + this.sources.length );
return source;
},
/**
* Get playable sources
*
* @returns {Array} of playable media sources
*/
getPlayableSources: function() {
var playableSources = [];
for ( var i = 0; i < this.sources.length; i++ ) {
if ( this.isPlayableType( this.sources[i].mimeType ) ) {
playableSources.push( this.sources[i] );
} else {
}
};
mw.log( "MediaElement::GetPlayableSources " + playableSources.length + ' sources playable out of ' + this.sources.length );
return playableSources;
}
};
} )( mediaWiki, jQuery );
| modules/EmbedPlayer/mw.MediaElement.js | /**
* A media element corresponding to a <video> element.
*
* It is implemented as a collection of mediaSource objects. The media sources
* will be initialized from the <video> element, its child <source> elements,
* and/or the ROE file referenced by the <video> element.
*
* @param {element}
* videoElement <video> element used for initialization.
* @constructor
*/
( function( mw, $ ) {
mw.MediaElement = function( element ) {
this.init( element );
};
mw.MediaElement.prototype = {
// The array of mediaSource elements.
sources: null,
// flag for ROE data being added.
addedROEData: false,
// Selected mediaSource element.
selectedSource: null,
/**
* Media Element constructor
*
* Sets up a mediaElement from a provided top level "video" element adds any
* child sources that are found
*
* @param {Element}
* videoElement Element that has src attribute or has children
* source elements
*/
init: function( videoElement ) {
var _this = this;
mw.log( "EmbedPlayer::mediaElement:init:" + videoElement.id );
this.parentEmbedId = videoElement.id;
this.sources = new Array();
// Process the videoElement as a source element:
if( videoElement ){
if ( $( videoElement ).attr( "src" ) ) {
_this.tryAddSource( videoElement );
}
// Process elements source children
$( videoElement ).find( 'source,track' ).each( function( ) {
_this.tryAddSource( this );
} );
}
},
/**
* Updates the time request for all sources that have a standard time
* request argument (ie &t=start_time/end_time)
*
* @param {String}
* startNpt Start time in npt format
* @param {String}
* endNpt End time in npt format
*/
updateSourceTimes: function( startNpt, endNpt ) {
var _this = this;
$.each( this.sources, function( inx, mediaSource ) {
mediaSource.updateSrcTime( startNpt, endNpt );
} );
},
/**
* Get Text tracks
*/
getTextTracks: function(){
var textTracks = [];
$.each( this.sources, function(inx, source ){
if ( source.nodeName == 'track' || ( source.mimeType && source.mimeType.indexOf('text/') !== -1 )){
textTracks.push( source );
}
});
return textTracks;
},
/**
* Returns the array of mediaSources of this element.
*
* @param {String}
* [mimeFilter] Filter criteria for set of mediaSources to return
* @return {Array} mediaSource elements.
*/
getSources: function( mimeFilter ) {
if ( !mimeFilter ) {
return this.sources;
}
// Apply mime filter:
var source_set = new Array();
for ( var i = 0; i < this.sources.length ; i++ ) {
if ( this.sources[i].mimeType &&
this.sources[i].mimeType.indexOf( mimeFilter ) != -1 )
{
source_set.push( this.sources[i] );
}
}
return source_set;
},
/**
* Selects a source by id
*
* @param {String}
* sourceId Id of the source to select.
* @return {MediaSource} The selected mediaSource or null if not found
*/
getSourceById:function( sourceId ) {
for ( var i = 0; i < this.sources.length ; i++ ) {
if ( this.sources[i].id == sourceId ) {
return this.sources[i];
}
}
return null;
},
/**
* Selects a particular source for playback updating the "selectedSource"
*
* @param {Number}
* index Index of source element to set as selectedSource
*/
setSourceByIndex: function( index ) {
mw.log( 'EmbedPlayer::mediaElement:selectSource: ' + index );
var oldSrc = this.selectedSource.getSrc();
var playableSources = this.getPlayableSources();
for ( var i = 0; i < playableSources.length; i++ ) {
if ( i == index ) {
this.selectedSource = playableSources[i];
break;
}
}
if( oldSrc != this.selectedSource.getSrc() ){
$( '#' + this.parentEmbedId ).trigger( 'SourceChange');
}
},
/**
* Sets a the selected source to passed in source object
* @param {Object} Source
*/
setSource: function( source ){
var oldSrc = this.selectedSource.getSrc();
this.selectedSource = source;
if( oldSrc != this.selectedSource.getSrc() ){
$( '#' + this.parentEmbedId ).trigger( 'SourceChange');
}
},
/**
* Selects the default source via cookie preference, default marked, or by
* id order
*/
autoSelectSource: function() {
mw.log( 'EmbedPlayer::mediaElement::autoSelectSource' );
var _this = this;
// Select the default source
var playableSources = this.getPlayableSources();
var flash_flag = ogg_flag = false;
// Check if there are any playableSources
if( playableSources.length == 0 ){
return false;
}
var setSelectedSource = function( source ){
_this.selectedSource = source;
return _this.selectedSource;
};
// Set via user-preference
$.each( playableSources, function( inx, source ){
var mimeType =source.mimeType;
if ( mw.EmbedTypes.getMediaPlayers().preference[ 'format_preference' ] == mimeType ) {
mw.log( 'MediaElement::autoSelectSource: Set via format_preference: ' + source.mimeType );
return setSelectedSource( source );
}
});
// Set via module driven preference:
$( this ).trigger( 'AutoSelectSource', playableSources );
if( _this.selectedSource ){
mw.log('MediaElement::autoSelectSource: Set via trigger::' + _this.selectedSource.getTitle() );
return _this.selectedSource;
}
// Set via marked default:
$.each( playableSources, function( inx, source ){
if ( source.markedDefault ) {
mw.log( 'MediaElement::autoSelectSource: Set via marked default: ' + source.markedDefault );
return setSelectedSource( source );;
}
});
//Set via user bandwith pref
if( $.cookie('EmbedPlayer.UserBandwidth') ){
$.each( playableSources, function(inx, source ){
if( source.bandwidth ){
if( source.bandwidth < $.cookie('EmbedPlayer.UserBandwidth') ){
setSelectedSource( source );
}
}
});
}
if ( this.selectedSource ) {
mw.log('MediaElement::autoSelectSource: Set via bandwidth prefrence: source ' + source.bandwidth + ' user: ' + $.cookie('EmbedPlayer.UserBandwidth') );
return this.selectedSource;
}
// Set via embed resolution closest to relative to display size
var minSizeDelta = null;
if( this.parentEmbedId ){
var displayWidth = $('#' + this.parentEmbedId).width();
$.each( playableSources, function(inx, source ){
if( source.width && displayWidth ){
var sizeDelta = Math.abs( source.width - displayWidth );
mw.log('MediaElement::autoSelectSource: size delta : ' + sizeDelta + ' for s:' + source.width );
if( minSizeDelta == null || sizeDelta < minSizeDelta){
minSizeDelta = sizeDelta;
setSelectedSource( source );
}
}
});
}
// If we found a source via display resolution return true
if ( this.selectedSource ) {
mw.log('MediaElement::autoSelectSource: Set via embed resolution:' + this.selectedSource.width + ' close to: ' + displayWidth );
return this.selectedSource;
}
// Prefer native playback ( and prefer WebM over ogg and h.264 )
var namedSources = {};
$.each( playableSources, function(inx, source ){
var mimeType = source.mimeType;
var player = mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType );
if ( player && player.library == 'Native' ) {
switch( player.id ){
case 'mp3Native':
namedSources['mp3'] = source;
break;
case 'oggNative':
namedSources['ogg'] = source;
break;
case 'webmNative':
namedSources['webm'] = source;
break;
case 'h264Native':
namedSources['h264'] = source;
break;
}
}
});
var codecPref = mw.getConfig( 'EmbedPlayer.CodecPreference');
for(var i =0; i < codecPref.length; i++){
var codec = codecPref[ i ];
if( namedSources[ codec ]){
mw.log('MediaElement::autoSelectSource: set via EmbedPlayer.CodecPreference: ' + namedSources[ codec ].getTitle() );
return setSelectedSource( namedSources[ codec ] );
}
};
// Set h264 via native or flash fallback
$.each( playableSources, function(inx, source ){
var mimeType = source.mimeType;
var player = mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType );
if ( mimeType == 'video/h264'
&& player
&& (
player.library == 'Native'
||
player.library == 'Kplayer'
)
) {
if( source ){
mw.log('MediaElement::autoSelectSource: Set h264 via native or flash fallback:' + source.getTitle() );
return setSelectedSource( source );
}
}
});
// Else just select the first playable source
if ( !this.selectedSource && playableSources[0] ) {
mw.log( 'MediaElement::autoSelectSource: Set via first source: ' + playableSources[0].getTitle() );
return setSelectedSource( playableSources[0] );
}
// No Source found so no source selected
return false;
},
/**
* check if the mime is ogg
*/
isOgg: function( mimeType ){
if ( mimeType == 'video/ogg'
|| mimeType == 'ogg/video'
|| mimeType == 'video/annodex'
|| mimeType == 'application/ogg'
) {
return true;
}
return false;
},
/**
* Returns the thumbnail URL for the media element.
*
* @returns {String} thumbnail URL
*/
getPosterSrc: function( ) {
return this.poster;
},
/**
* Checks whether there is a stream of a specified MIME type.
*
* @param {String}
* mimeType MIME type to check.
* @return {Boolean} true if sources include MIME false if not.
*/
hasStreamOfMIMEType: function( mimeType )
{
for ( var i = 0; i < this.sources.length; i++ )
{
if ( this.sources[i].getMIMEType() == mimeType ){
return true;
}
}
return false;
},
/**
* Checks if media is a playable type
*/
isPlayableType: function( mimeType ) {
// mw.log("isPlayableType:: " + mimeType);
if ( mw.EmbedTypes.getMediaPlayers().defaultPlayer( mimeType ) ) {
return true;
} else {
return false;
}
},
/**
* Adds a single mediaSource using the provided element if the element has a
* 'src' attribute.
*
* @param {Element}
* element <video>, <source> or <mediaSource> <text> element.
*/
tryAddSource: function( element ) {
//mw.log( 'mw.MediaElement::tryAddSource:' + $( element ).attr( "src" ) );
var newSrc = $( element ).attr( 'src' );
if ( newSrc ) {
// Make sure an existing element with the same src does not already exist:
for ( var i = 0; i < this.sources.length; i++ ) {
if ( this.sources[i].src == newSrc ) {
// Source already exists update any new attr:
this.sources[i].updateSource( element );
return this.sources[i];
}
}
}
// Create a new source
var source = new mw.MediaSource( element );
this.sources.push( source );
//mw.log( 'tryAddSource: added source ::' + source + 'sl:' + this.sources.length );
return source;
},
/**
* Get playable sources
*
* @returns {Array} of playable media sources
*/
getPlayableSources: function() {
var playableSources = [];
for ( var i = 0; i < this.sources.length; i++ ) {
if ( this.isPlayableType( this.sources[i].mimeType ) ) {
playableSources.push( this.sources[i] );
} else {
}
};
mw.log( "MediaElement::GetPlayableSources " + playableSources.length + ' sources playable out of ' + this.sources.length );
return playableSources;
}
};
} )( mediaWiki, jQuery );
| bandwith pref will always set source to closest bandwith allocation while not going over EmbedPlayer.UserBandwidth
git-svn-id: c0b6191cc07a4792c57db861536a2f9523dc26f7@3746 b58a29cf-3064-46da-94c6-1c29cc75c8e5
| modules/EmbedPlayer/mw.MediaElement.js | bandwith pref will always set source to closest bandwith allocation while not going over EmbedPlayer.UserBandwidth | <ide><path>odules/EmbedPlayer/mw.MediaElement.js
<ide> }
<ide> });
<ide>
<del> //Set via user bandwith pref
<add> //Set via user bandwith pref will always set source to closest bandwith allocation while not going over EmbedPlayer.UserBandwidth
<ide> if( $.cookie('EmbedPlayer.UserBandwidth') ){
<add> var currentMaxBadwith = 0;
<ide> $.each( playableSources, function(inx, source ){
<ide> if( source.bandwidth ){
<del> if( source.bandwidth < $.cookie('EmbedPlayer.UserBandwidth') ){
<add> if( source.bandwidth > currentMaxBadwith && source.bandwidth <= $.cookie('EmbedPlayer.UserBandwidth') ){
<add> currentMaxBadwith = source.bandwidth;
<ide> setSelectedSource( source );
<ide> }
<ide> } |
|
JavaScript | mit | a4a02b4cf02819a7cff446a632769fd1bf378c27 | 0 | ndm-io/wigwam-design-coming-soon,ndm-io/wigwam-design-coming-soon,ndm-io/wigwam-design-coming-soon | const data = {
singleWords: {
english: {
design: "design",
styling: "styling",
style: "style",
digital: "digital",
services: "services",
bespoke: "bespoke",
decoration: "decoration",
enhancing: "enhancing",
detail: "detail",
submit: "submit",
name: "name",
email: "email",
message: "message",
branding: "branding",
all: "all"
},
cymraeg: {
design: "dylunio",
styling: "steilio",
style: "steil",
digital: "ddigidol",
services: "gwasanaethau",
bespoke: "pwrpasol",
decoration: "addurno",
enhancing: "gwella",
detail: "manylion",
submit: "cyflwyno",
name: "enw",
email: "ebost",
message: "neges",
branding: "brandio",
all: "I gyd"
}
},
home: {
english: {
subtitle: "an interiors company"
},
cymraeg: {
subtitle: "cwmni dylunio mewnol"
}
},
nav: {
english: {
home: "home",
about: "about",
contact: "contact",
allRightsReserved: "all rights reserved"
},
cymraeg: {
home: "hafan",
about: "amdanom Ni",
contact: "cysylltu",
allRightsReserved: "cedwir pob hawl"
}
},
about: {
english: {
title: "About us",
subtitle: "WE DON’T HAVE A STYLE — WE HAVE STANDARDS",
aboutTheStudioTitle: "About the Studio",
aboutTheStudioBody: "Wigwam is a North Wales based interior design studio specialising in " +
"residential projects for private individuals. The studio helps clients " +
"across the North of England and Wales realise the full potential of their home",
whatWeDoTitle: "What we do",
whatWeDoBody: "Wigwam provides a personalised, relaxed design service from their studio in Llangollen. " +
"Sam designs each project personally, whilst also concentrating on what she does best and giving the " +
"client the peace of mind that the project will be carried out seamlessly.",
cupsOfTea: "cups of tea",
elleDecorMagazines: "elle decor magazines",
photosTaken: "photos taken",
happyClients: "happy clients",
bespokeBody: "If your focus is on an individual room, we’ll work with you to create a truly outstanding " +
"space that complements your taste and lifestyle. From a high spec kitchen to a sophisticated lounge or " +
"bedroom space, we will consider every aspect of your design and installation in meticulous detail.",
designBody: "Sam will work with you to unlock your property’s potential while improving the flow and " +
"functionality of the space, ensuring every detail works in harmony with the next.",
decorationBody: "If you’re simply planning to update an existing room or entirely refurbish a newly " +
"acquired property, you can commission our creative team to select the perfect finishes and specify a " +
"colour palette to complement your taste and transform your home.",
styleBody: "Whether you desire the understated elegance of a classic contemporary interior or the warmth " +
"of a traditional design, our talented designers have the vision to both meet and exceed your aspirations.",
enhancingBody: "Our focus is always on enhancing the inherent character and period of your property, " +
"combined with creating a space that is a true expression of your personality.",
detailBody: "We continue to add detail and thought to each and every individual aspect of the project " +
"right up until installation, liaising with our suppliers and contractors to achieve a design of " +
"impeccable quality."
},
cymraeg: {
title: "Amdanom ni",
subtitle: "NID YDYM YN CAEL ARDDULL - MAE GENNYM SAFONAU",
aboutTheStudioTitle: "Am y Stiwdio",
aboutTheStudioBody: "Wigwam yn stiwdio dylunio mewnol yn seiliedig Gogledd Cymru yn arbenigo mewn " +
"prosiectau preswyl ar gyfer unigolion preifat. Mae'r stiwdio yn helpu cleientiaid ar draws Gogledd " +
"Lloegr a Chymru gwireddu potensial llawn eu cartref.",
whatWeDoTitle: "Yr hyn a wnawn",
whatWeDoBody: "Wigwam yn darparu gwasanaeth dylunio personol, hamddenol o'u stiwdio yn Llangollen. " +
"Sam dyluniadau pob prosiect yn bersonol, tra hefyd yn canolbwyntio ar yr hyn y mae'n ei wneud orau " +
"ac yn rhoi tawelwch meddwl y bydd y prosiect yn cael ei wneud yn ddi-dor y cleient.",
cupsOfTea: "cwpanaid o de",
elleDecorMagazines: "cylchgronau elle decor",
photosTaken: "lluniau a dynnwyd",
happyClients: "cleientiaid yn hapus",
bespokeBody: "Os yw eich ffocws ar ystafell unigol, byddwn yn gweithio gyda chi i greu gofod " +
"gwirioneddol eithriadol sy'n ategu eich chwaeth a ffordd o fyw. O gegin fanyleb uchel i lolfa " +
"soffistigedig neu ofod ystafell wely, byddwn yn ystyried pob agwedd ar eich dylunio a gosod yn " +
"fanwl fanwl.",
designBody: "Bydd Sam yn gweithio gyda chi i ddatgloi potensial eich eiddo wrth wella llif ac " +
"ymarferoldeb y gofod, gan sicrhau bob manylyn yn gweithio mewn cytgord â'r nesaf.",
decorationBody: "Os ydych ond yn bwriadu diweddaru'r ystafell bresennol neu yn llwyr adnewyddu eiddo " +
"newydd ei brynu, gallwch gomisiynu ein tîm creadigol i ddewis y gorffeniadau perffaith a nodi palet " +
"lliw i gyd-fynd eich chwaeth a thrawsnewid eich cartref.",
styleBody: "P'un a ydych yn ei ddymuno ceinder cynnil o du cyfoes clasurol neu gynhesrwydd o ddyluniad " +
"traddodiadol, mae ein dylunwyr talentog yn cael y weledigaeth i gwrdd â rhagori ar eich dyheadau.",
enhancingBody: "Mae ein ffocws bob amser ar wella cymeriad cynhenid a chyfnod eich eiddo, ynghyd â " +
"chreu man sydd yn fynegiant cywir o'ch personoliaeth.",
detailBody: "Rydym yn parhau i ychwanegu manylion ac yn meddwl i bob agwedd unigol o'r prosiect hyd nes " +
"y gosod, cysylltu â'n cyflenwyr a chontractwyr i sicrhau dyluniad o ansawdd uchaf."
}
},
contact: {
english: {
title: "Contact us",
subtitle: "We love talking about design.",
getInTouch: "Get in touch",
leaveUsANote: "Leave us a quick note and we will get back in touch, or just use your favourite social " +
"media account."
},
cymraeg: {
title: "Cysylltu â ni",
subtitle: "rydym wrth fy modd yn siarad am ddylunio.",
getInTouch: "yn gadael i siarad",
leaveUsANote: "Gadewch nodyn sydyn a byddwn yn cysylltu â chi, neu dim ond yn defnyddio eich hoff " +
"gyfrif cyfryngau cymdeithasol."
}
},
errorMessages: {
keys: {
missingEmail: "missingEmail",
missingName: "missingName",
missingMessage: "missingMessage",
messageTooLong: "messageTooLong"
},
missingEmail: {
english: "We need a valid email address to get back to you",
cymraeg: "Mae angen cyfeiriad e-bost dilys arnom i ddychwelyd atoch chi"
},
missingName: {
english: "Please include your name",
cymraeg: "Cofiwch gynnwys eich enw"
},
missingMessage: {
english: "Please tell us what your contact is about!",
cymraeg: "Dywedwch wrthym beth yw eich cyswllt chi!"
},
messageTooLong: {
english: "Sorry, your message is a bit too long. Please cut to the chase!",
cymraeg: "Mae'n ddrwg gennym, mae eich neges yn ychydig yn rhy hir. Torrwch at yr olrhain!"
}
},
responseMessages: {
keys: {
thankYouMessage: "thankYouMessage",
errorMessage: "errorMessage",
missingInfoMessage: "missingInfoMessage"
},
thankYouMessage: {
english: "Thank you for your message. We will be in touch right away",
cymraeg: "Diolch am eich neges. Byddwn ni mewn cysylltiad cyn bo hir"
},
errorMessage: {
english: "Oh dear, seems something went wrong. Try finding us on social media instead",
cymraeg: "O annwyl, ymddengys bod rhywbeth yn mynd o'i le. Ceisiwch ddod o hyd i ni ar " +
"gyfryngau cymdeithasol yn lle hynny"
},
missingInfoMessage: {
english: "Sorry, it seems we are missing some information from you. Please try again",
cymraeg: "Mae'n ddrwg gennym, mae'n ymddangos ein bod yn colli rhywfaint o wybodaeth gennych chi. " +
"Trio eto os gwelwch yn dda"
}
}
};
module.exports = data; | src/scripts/translation/data/index.js | const data = {
singleWords: {
english: {
design: "design",
styling: "styling",
style: "style",
digital: "digital",
services: "services",
bespoke: "bespoke",
decoration: "decoration",
enhancing: "enhancing",
detail: "detail",
submit: "submit",
name: "name",
email: "email",
message: "message",
branding: "branding",
all: "all"
},
cymraeg: {
design: "dylunio",
styling: "steilio",
style: "steil",
digital: "ddigidol",
services: "gwasanaethau",
bespoke: "pwrpasol",
decoration: "addurno",
enhancing: "gwella",
detail: "manylion",
submit: "cyflwyno",
name: "enw",
email: "ebost",
message: "neges",
branding: "brandio",
all: "I gyd"
}
},
home: {
english: {
subtitle: "an interiors company"
},
cymraeg: {
subtitle: "cwmni dylunio mewnol"
}
},
nav: {
english: {
home: "home",
about: "about",
contact: "contact",
allRightsReserved: "all rights reserved"
},
cymraeg: {
home: "hafan",
about: "amdanom Ni",
contact: "cysylltu",
allRightsReserved: "cedwir pob hawl"
}
},
about: {
english: {
title: "About us",
subtitle: "WE DON’T HAVE A STYLE — WE HAVE STANDARDS",
aboutTheStudioTitle: "About the Studio",
aboutTheStudioBody: "Wigwam is a North Wales based interior design studio specialising in " +
"residential projects for private individuals. The studio helps clients " +
"across the North of England and Wales realise the full potential of their home",
whatWeDoTitle: "What we do",
whatWeDoBody: "Wigwam provides a personalised, relaxed design service from their studio in Llangollen. " +
"Sam designs each project personally, whilst also concentrating on what she does best and giving the " +
"client the peace of mind that the project will be carried out seamlessly.",
cupsOfTea: "cups of tea",
elleDecorMagazines: "elle decor magazines",
photosTaken: "photos taken",
happyClients: "happy clients",
bespokeBody: "If your focus is on an individual room, we’ll work with you to create a truly outstanding " +
"space that complements your taste and lifestyle. From a high spec kitchen to a sophisticated lounge or " +
"bedroom space, we will consider every aspect of your design and installation in meticulous detail.",
designBody: "Sam will work with you to unlock your property’s potential while improving the flow and " +
"functionality of the space, ensuring every detail works in harmony with the next.",
decorationBody: "If you’re simply planning to update an existing room or entirely refurbish a newly " +
"acquired property, you can commission our creative team to select the perfect finishes and specify a " +
"colour palette to complement your taste and transform your home.",
styleBody: "Whether you desire the understated elegance of a classic contemporary interior or the warmth " +
"of a traditional design, our talented designers have the vision to both meet and exceed your aspirations.",
enhancingBody: "Our focus is always on enhancing the inherent character and period of your property, " +
"combined with creating a space that is a true expression of your personality.",
detailBody: "We continue to add detail and thought to each and every individual aspect of the project " +
"right up until installation, liaising with our suppliers and contractors to achieve a design of " +
"impeccable quality."
},
cymraeg: {
title: "Amdanom ni",
subtitle: "NID YDYM YN CAEL ARDDULL - MAE GENNYM SAFONAU",
aboutTheStudioTitle: "Am y Stiwdio",
aboutTheStudioBody: "Wigwam yn stiwdio dylunio mewnol yn seiliedig Gogledd Cymru yn arbenigo mewn " +
"prosiectau preswyl ar gyfer unigolion preifat. Mae'r stiwdio yn helpu cleientiaid ar draws Gogledd " +
"Lloegr a Chymru gwireddu potensial llawn eu cartref.",
whatWeDoTitle: "Yr hyn a wnawn",
whatWeDoBody: "Wigwam yn darparu gwasanaeth dylunio personol, hamddenol o'u stiwdio yn Llangollen. " +
"Sam dyluniadau pob prosiect yn bersonol, tra hefyd yn canolbwyntio ar yr hyn y mae'n ei wneud orau " +
"ac yn rhoi tawelwch meddwl y bydd y prosiect yn cael ei wneud yn ddi-dor y cleient.",
cupsOfTea: "cwpanaid o de",
elleDecorMagazines: "cylchgronau elle decor",
photosTaken: "lluniau a dynnwyd",
happyClients: "cleientiaid yn hapus",
bespokeBody: "Os yw eich ffocws ar ystafell unigol, byddwn yn gweithio gyda chi i greu gofod gwirioneddol " +
"eithriadol sy'n ategu eich chwaeth a ffordd o fyw. O gegin fanyleb uchel i lolfa soffistigedig neu " +
"ofod ystafell wely, byddwn yn ystyried pob agwedd ar eich dylunio a gosod yn fanwl fanwl.",
designBody: "Bydd Sam yn gweithio gyda chi i ddatgloi potensial eich eiddo wrth wella llif ac " +
"ymarferoldeb y gofod, gan sicrhau bob manylyn yn gweithio mewn cytgord â'r nesaf.",
decorationBody: "Os ydych ond yn bwriadu diweddaru'r ystafell bresennol neu yn llwyr adnewyddu eiddo " +
"newydd ei brynu, gallwch gomisiynu ein tîm creadigol i ddewis y gorffeniadau perffaith a nodi palet " +
"lliw i gyd-fynd eich chwaeth a thrawsnewid eich cartref.",
styleBody: "P'un a ydych yn ei ddymuno ceinder cynnil o du cyfoes clasurol neu gynhesrwydd o ddyluniad " +
"traddodiadol, mae ein dylunwyr talentog yn cael y weledigaeth i gwrdd â rhagori ar eich dyheadau.",
enhancingBody: "Mae ein ffocws bob amser ar wella cymeriad cynhenid a chyfnod eich eiddo, ynghyd â " +
"chreu man sydd yn fynegiant cywir o'ch personoliaeth.",
detailBody: "Rydym yn parhau i ychwanegu manylion ac yn meddwl i bob agwedd unigol o'r prosiect hyd nes " +
"y gosod, cysylltu â'n cyflenwyr a chontractwyr i sicrhau dyluniad o ansawdd uchaf."
}
},
contact: {
english: {
title: "Contact us",
subtitle: "We love talking about design.",
getInTouch: "Get in touch",
leaveUsANote: "Leave us a quick note and we will get back in touch, or just use your favourite social " +
"media account."
},
cymraeg: {
title: "Cysylltu â ni",
subtitle: "rydym wrth fy modd yn siarad am ddylunio.",
getInTouch: "yn gadael i siarad",
leaveUsANote: "Gadewch nodyn sydyn a byddwn yn cysylltu â chi, neu dim ond yn defnyddio eich hoff " +
"gyfrif cyfryngau cymdeithasol."
}
},
errorMessages: {
keys: {
missingEmail: "missingEmail",
missingName: "missingName",
missingMessage: "missingMessage",
messageTooLong: "messageTooLong"
},
missingEmail: {
english: "We need a valid email address to get back to you",
cymraeg: "Mae angen cyfeiriad e-bost dilys arnom i ddychwelyd atoch chi"
},
missingName: {
english: "Please include your name",
cymraeg: "Cofiwch gynnwys eich enw"
},
missingMessage: {
english: "Please tell us what your contact is about!",
cymraeg: "Dywedwch wrthym beth yw eich cyswllt chi!"
},
messageTooLong: {
english: "Sorry, your message is a bit too long. Please cut to the chase!",
cymraeg: "Mae'n ddrwg gennym, mae eich neges yn ychydig yn rhy hir. Torrwch at yr olrhain!"
}
},
responseMessages: {
keys: {
thankYouMessage: "thankYouMessage",
errorMessage: "errorMessage",
missingInfoMessage: "missingInfoMessage"
},
thankYouMessage: {
english: "Thank you for your message. We will be in touch right away",
cymraeg: "Diolch am eich neges. Byddwn ni mewn cysylltiad cyn bo hir"
},
errorMessage: {
english: "Oh dear, seems something went wrong. Try finding us on social media instead",
cymraeg: "O annwyl, ymddengys bod rhywbeth yn mynd o'i le. Ceisiwch ddod o hyd i ni ar " +
"gyfryngau cymdeithasol yn lle hynny"
},
missingInfoMessage: {
english: "Sorry, it seems we are missing some information from you. Please try again",
cymraeg: "Mae'n ddrwg gennym, mae'n ymddangos ein bod yn colli rhywfaint o wybodaeth gennych chi. " +
"Trio eto os gwelwch yn dda"
}
}
};
module.exports = data; | final changes ready for merge
| src/scripts/translation/data/index.js | final changes ready for merge | <ide><path>rc/scripts/translation/data/index.js
<ide> elleDecorMagazines: "cylchgronau elle decor",
<ide> photosTaken: "lluniau a dynnwyd",
<ide> happyClients: "cleientiaid yn hapus",
<del> bespokeBody: "Os yw eich ffocws ar ystafell unigol, byddwn yn gweithio gyda chi i greu gofod gwirioneddol " +
<del> "eithriadol sy'n ategu eich chwaeth a ffordd o fyw. O gegin fanyleb uchel i lolfa soffistigedig neu " +
<del> "ofod ystafell wely, byddwn yn ystyried pob agwedd ar eich dylunio a gosod yn fanwl fanwl.",
<add> bespokeBody: "Os yw eich ffocws ar ystafell unigol, byddwn yn gweithio gyda chi i greu gofod " +
<add> "gwirioneddol eithriadol sy'n ategu eich chwaeth a ffordd o fyw. O gegin fanyleb uchel i lolfa " +
<add> "soffistigedig neu ofod ystafell wely, byddwn yn ystyried pob agwedd ar eich dylunio a gosod yn " +
<add> "fanwl fanwl.",
<ide> designBody: "Bydd Sam yn gweithio gyda chi i ddatgloi potensial eich eiddo wrth wella llif ac " +
<ide> "ymarferoldeb y gofod, gan sicrhau bob manylyn yn gweithio mewn cytgord â'r nesaf.",
<ide> decorationBody: "Os ydych ond yn bwriadu diweddaru'r ystafell bresennol neu yn llwyr adnewyddu eiddo " + |
|
Java | mit | bcb21292835ff867e984af26fcfa8852c1ff829a | 0 | turbolinks/turbolinks-android,gavinliu/turbolinks-android,turbolinks/turbolinks-android,timoschloesser/turbolinks-android,turbolinks/turbolinks-android,timoschloesser/turbolinks-android,turbolinks/turbolinks-android,timoschloesser/turbolinks-android,timoschloesser/turbolinks-android,gavinliu/turbolinks-android,gavinliu/turbolinks-android,gavinliu/turbolinks-android | package com.basecamp.turbolinks;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
* <p>The custom view to add to your activity layout.</p>
*/
public class TurbolinksView extends FrameLayout {
private View progressView = null;
private TurbolinksSession turbolinksSession;
private ImageView screenshotView = null;
private int screenshotOrientation = 0;
// ---------------------------------------------------
// Constructors
// ---------------------------------------------------
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
*/
public TurbolinksView(Context context) {
super(context);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
*/
public TurbolinksView(Context context, AttributeSet attrs) {
super(context, attrs);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
* @param defStyleAttr Refer to FrameLayout.
*/
public TurbolinksView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
* @param defStyleAttr Refer to FrameLayout.
* @param defStyleRes Refer to FrameLayout.
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public TurbolinksView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
// ---------------------------------------------------
// Package public
// ---------------------------------------------------
/**
* <p>Shows a progress view or a generated screenshot of the webview content (if available)
* on top of the webview. When advancing to a new url, this indicates that the page is still
* loading. When resuming an activity in the navigation stack, a screenshot is displayed while the
* webview is restoring its snapshot.</p>
* <p>Progress indicator is set to a specified delay before displaying -- a very short delay
* (like 500 ms) can improve perceived loading time to the user.</p>
*
* @param progressView The progressView to display on top of TurbolinksView.
* @param progressIndicator The progressIndicator to display in the view.
* @param delay The delay before showing the progressIndicator in the view. The default progress view
* is 500 ms.
*/
void showProgress(final View progressView, final View progressIndicator, int delay) {
TurbolinksLog.d("showProgress called");
// Don't show the progress view if a screenshot is available
if (screenshotView != null && screenshotOrientation == getOrientation()) return;
hideProgress();
this.progressView = progressView;
progressView.setClickable(true);
addView(progressView);
progressIndicator.setVisibility(View.GONE);
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
progressIndicator.setVisibility(View.VISIBLE);
}
}, delay);
}
/**
* <p>Removes the progress view and/or screenshot from the TurbolinksView, so the webview is
* visible underneath.</p>
*/
void hideProgress() {
removeProgressView();
removeScreenshotView();
}
/**
* <p>Attach the shared webView to the TurbolinksView.</p>
*
* @param webView The shared webView.
* @param swipeRefreshLayout parent view of webView
* @param screenshotsEnabled Indicates whether screenshots are enabled for the current session.
*/
void attachWebView(WebView webView, TurbolinksSwipeRefreshLayout swipeRefreshLayout, boolean screenshotsEnabled) {
if (swipeRefreshLayout.getParent() == this) return;
if (swipeRefreshLayout.getParent() instanceof TurbolinksView) {
TurbolinksView parent = (TurbolinksView) swipeRefreshLayout.getParent();
if (screenshotsEnabled) parent.screenshotView();
parent.removeView(swipeRefreshLayout);
}
removeChildViewFromSwipeRefresh(webView);
// Set the webview background to match the container background
if (getBackground() instanceof ColorDrawable) {
webView.setBackgroundColor(((ColorDrawable) getBackground()).getColor());
}
swipeRefreshLayout.addView(webView);
addView(swipeRefreshLayout, 0);
}
private void removeChildViewFromSwipeRefresh(View child) {
ViewGroup parent = (ViewGroup) child.getParent();
if (parent != null) {
parent.removeView(child);
}
}
/**
* Removes the progress view as a child of TurbolinksView
*/
private void removeProgressView() {
if (progressView == null) return;
removeView(progressView);
TurbolinksLog.d("Progress view removed");
}
/**
* Removes the screenshot view as a child of TurbolinksView
*/
private void removeScreenshotView() {
if (screenshotView == null) return;
removeView(screenshotView);
screenshotView = null;
TurbolinksLog.d("Screenshot removed");
}
/**
* <p>Creates a screenshot of the current webview content and makes it the top visible view.</p>
*/
private void screenshotView() {
// Only take a screenshot if the activity is not finishing
if (getContext() instanceof Activity && ((Activity) getContext()).isFinishing()) return;
Bitmap screenshot = getScreenshotBitmap();
if (screenshot == null) return;
screenshotView = new ImageView(getContext());
screenshotView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
screenshotView.setClickable(true);
screenshotView.setImageBitmap(screenshot);
screenshotOrientation = getOrientation();
addView(screenshotView);
TurbolinksLog.d("Screenshot taken");
}
/**
* <p>Creates a bitmap screenshot of the webview contents from the canvas.</p>
* @return The screenshot of the webview contents.
*/
private Bitmap getScreenshotBitmap() {
if (getWidth() <= 0 || getHeight() <= 0) return null;
Bitmap bitmap = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
draw(new Canvas(bitmap));
return bitmap;
}
/**
* Gets the current orientation of the device.
* @return The current orientation.
*/
private int getOrientation() {
return getContext().getResources().getConfiguration().orientation;
}
}
| turbolinks/src/main/java/com/basecamp/turbolinks/TurbolinksView.java | package com.basecamp.turbolinks;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Handler;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
* <p>The custom view to add to your activity layout.</p>
*/
public class TurbolinksView extends FrameLayout {
private View progressView = null;
private TurbolinksSession turbolinksSession;
private ImageView screenshotView = null;
private int screenshotOrientation = 0;
// ---------------------------------------------------
// Constructors
// ---------------------------------------------------
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
*/
public TurbolinksView(Context context) {
super(context);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
*/
public TurbolinksView(Context context, AttributeSet attrs) {
super(context, attrs);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
* @param defStyleAttr Refer to FrameLayout.
*/
public TurbolinksView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* <p>Constructor to match FrameLayout.</p>
*
* @param context Refer to FrameLayout.
* @param attrs Refer to FrameLayout.
* @param defStyleAttr Refer to FrameLayout.
* @param defStyleRes Refer to FrameLayout.
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public TurbolinksView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
}
// ---------------------------------------------------
// Package public
// ---------------------------------------------------
/**
* <p>Shows a progress view or a generated screenshot of the webview content (if available)
* on top of the webview. When advancing to a new url, this indicates that the page is still
* loading. When resuming an activity in the navigation stack, a screenshot is displayed while the
* webview is restoring its snapshot.</p>
* <p>Progress indicator is set to a specified delay before displaying -- a very short delay
* (like 500 ms) can improve perceived loading time to the user.</p>
*
* @param progressView The progressView to display on top of TurbolinksView.
* @param progressIndicator The progressIndicator to display in the view.
* @param delay The delay before showing the progressIndicator in the view. The default progress view
* is 500 ms.
*/
void showProgress(final View progressView, final View progressIndicator, int delay) {
TurbolinksLog.d("showProgress called");
// Don't show the progress view if a screenshot is available
if (screenshotView != null && screenshotOrientation == getOrientation()) return;
hideProgress();
this.progressView = progressView;
progressView.setClickable(true);
addView(progressView);
progressIndicator.setVisibility(View.GONE);
final Handler handler = new Handler();
handler.postDelayed(new Runnable() {
@Override
public void run() {
progressIndicator.setVisibility(View.VISIBLE);
}
}, delay);
}
/**
* <p>Removes the progress view and/or screenshot from the TurbolinksView, so the webview is
* visible underneath.</p>
*/
void hideProgress() {
removeProgressView();
removeScreenshotView();
}
/**
* <p>Attach the shared webView to the TurbolinksView.</p>
*
* @param webView The shared webView.
* @param screenshotsEnabled Indicates whether screenshots are enabled for the current session.
*/
void attachWebView(WebView webView, TurbolinksSwipeRefreshLayout swipeRefreshLayout, boolean screenshotsEnabled) {
if (swipeRefreshLayout.getParent() == this) return;
if (swipeRefreshLayout.getParent() instanceof TurbolinksView) {
TurbolinksView parent = (TurbolinksView) swipeRefreshLayout.getParent();
if (screenshotsEnabled) parent.screenshotView();
parent.removeView(swipeRefreshLayout);
}
removeChildViewFromSwipeRefresh(webView);
// Set the webview background to match the container background
if (getBackground() instanceof ColorDrawable) {
webView.setBackgroundColor(((ColorDrawable) getBackground()).getColor());
}
swipeRefreshLayout.addView(webView);
addView(swipeRefreshLayout, 0);
}
private void removeChildViewFromSwipeRefresh(View child) {
ViewGroup parent = (ViewGroup) child.getParent();
if (parent != null) {
parent.removeView(child);
}
}
/**
* Removes the progress view as a child of TurbolinksView
*/
private void removeProgressView() {
if (progressView == null) return;
removeView(progressView);
TurbolinksLog.d("Progress view removed");
}
/**
* Removes the screenshot view as a child of TurbolinksView
*/
private void removeScreenshotView() {
if (screenshotView == null) return;
removeView(screenshotView);
screenshotView = null;
TurbolinksLog.d("Screenshot removed");
}
/**
* <p>Creates a screenshot of the current webview content and makes it the top visible view.</p>
*/
private void screenshotView() {
// Only take a screenshot if the activity is not finishing
if (getContext() instanceof Activity && ((Activity) getContext()).isFinishing()) return;
Bitmap screenshot = getScreenshotBitmap();
if (screenshot == null) return;
screenshotView = new ImageView(getContext());
screenshotView.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
screenshotView.setClickable(true);
screenshotView.setImageBitmap(screenshot);
screenshotOrientation = getOrientation();
addView(screenshotView);
TurbolinksLog.d("Screenshot taken");
}
/**
* <p>Creates a bitmap screenshot of the webview contents from the canvas.</p>
* @return The screenshot of the webview contents.
*/
private Bitmap getScreenshotBitmap() {
if (getWidth() <= 0 || getHeight() <= 0) return null;
Bitmap bitmap = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
draw(new Canvas(bitmap));
return bitmap;
}
/**
* Gets the current orientation of the device.
* @return The current orientation.
*/
private int getOrientation() {
return getContext().getResources().getConfiguration().orientation;
}
}
| Update Java Docs
| turbolinks/src/main/java/com/basecamp/turbolinks/TurbolinksView.java | Update Java Docs | <ide><path>urbolinks/src/main/java/com/basecamp/turbolinks/TurbolinksView.java
<ide> * <p>Attach the shared webView to the TurbolinksView.</p>
<ide> *
<ide> * @param webView The shared webView.
<add> * @param swipeRefreshLayout parent view of webView
<ide> * @param screenshotsEnabled Indicates whether screenshots are enabled for the current session.
<ide> */
<ide> void attachWebView(WebView webView, TurbolinksSwipeRefreshLayout swipeRefreshLayout, boolean screenshotsEnabled) { |
|
Java | apache-2.0 | error: pathspec 'lineagehw/org/lineage/hardware/SunlightEnhancement.java' did not match any file(s) known to git
| 686fd9ad65a1c5da79c3bd9f836a128e25acf0c5 | 1 | TeamNexus/android_device_samsung_zero-common,TeamNexus/android_device_samsung_zero-common,TeamNexus/android_device_samsung_zero-common,TeamNexus/android_device_samsung_zero-common,TeamNexus/android_device_samsung_zero-common | /*
* Copyright (C) 2014 The CyanogenMod Project
* Copyright (C) 2018 The LineageOS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lineageos.hardware;
import org.lineageos.internal.util.FileUtils;
/**
* Sunlight Readability Enhancement support, aka Facemelt Mode.
*
* Brightens up the screen via image processing or other tricks when
* under aggressive lighting conditions. Usually depends on CABC
* support.
*/
public class SunlightEnhancement {
private static final String LUX_PATH = "/sys/class/mdnie/mdnie/lux";
/* see drivers/video/fbdev/exynos/decon_7880/panels/mdnie_lite_table*, get_hbm_index */
private static final String LUX_VALUE = "40000";
/**
* Whether device supports SRE
*
* @return boolean Supported devices must return always true
*/
public static boolean isSupported() {
return FileUtils.isFileWritable(LUX_PATH);
}
/**
* This method return the current activation status of SRE
*
* @return boolean Must be false when SRE is not supported or not activated, or
* the operation failed while reading the status; true in any other case.
*/
public static boolean isEnabled() {
return !("0".equals(FileUtils.readOneLine(LUX_PATH)));
}
/**
* This method allows to setup SRE.
*
* @param status The new SRE status
* @return boolean Must be false if SRE is not supported or the operation
* failed; true in any other case.
*/
public static boolean setEnabled(boolean status) {
return FileUtils.writeLine(LUX_PATH, status ? LUX_VALUE : "0");
}
/**
* Whether adaptive backlight (CABL / CABC) is required to be enabled
*
* @return boolean False if adaptive backlight is not a dependency
*/
public static boolean isAdaptiveBacklightRequired() { return false; }
/**
* Set this to true if the implementation is self-managed and does
* it's own ambient sensing. In this case, setEnabled is assumed
* to toggle the feature on or off, but not activate it. If set
* to false, LiveDisplay will call setEnabled when the ambient lux
* threshold is crossed.
*
* @return true if this enhancement is self-managed
*/
public static boolean isSelfManaged() { return false; }
}
| lineagehw/org/lineage/hardware/SunlightEnhancement.java | zero: lineahehw: add SunlightEnhancement
Change-Id: I63292064fee0180f3620c59b19a648e11738d35b
| lineagehw/org/lineage/hardware/SunlightEnhancement.java | zero: lineahehw: add SunlightEnhancement | <ide><path>ineagehw/org/lineage/hardware/SunlightEnhancement.java
<add>/*
<add> * Copyright (C) 2014 The CyanogenMod Project
<add> * Copyright (C) 2018 The LineageOS Project
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.lineageos.hardware;
<add>
<add>import org.lineageos.internal.util.FileUtils;
<add>
<add>/**
<add> * Sunlight Readability Enhancement support, aka Facemelt Mode.
<add> *
<add> * Brightens up the screen via image processing or other tricks when
<add> * under aggressive lighting conditions. Usually depends on CABC
<add> * support.
<add> */
<add>public class SunlightEnhancement {
<add>
<add> private static final String LUX_PATH = "/sys/class/mdnie/mdnie/lux";
<add>
<add> /* see drivers/video/fbdev/exynos/decon_7880/panels/mdnie_lite_table*, get_hbm_index */
<add> private static final String LUX_VALUE = "40000";
<add>
<add> /**
<add> * Whether device supports SRE
<add> *
<add> * @return boolean Supported devices must return always true
<add> */
<add> public static boolean isSupported() {
<add> return FileUtils.isFileWritable(LUX_PATH);
<add> }
<add>
<add> /**
<add> * This method return the current activation status of SRE
<add> *
<add> * @return boolean Must be false when SRE is not supported or not activated, or
<add> * the operation failed while reading the status; true in any other case.
<add> */
<add> public static boolean isEnabled() {
<add> return !("0".equals(FileUtils.readOneLine(LUX_PATH)));
<add> }
<add>
<add> /**
<add> * This method allows to setup SRE.
<add> *
<add> * @param status The new SRE status
<add> * @return boolean Must be false if SRE is not supported or the operation
<add> * failed; true in any other case.
<add> */
<add> public static boolean setEnabled(boolean status) {
<add> return FileUtils.writeLine(LUX_PATH, status ? LUX_VALUE : "0");
<add> }
<add>
<add> /**
<add> * Whether adaptive backlight (CABL / CABC) is required to be enabled
<add> *
<add> * @return boolean False if adaptive backlight is not a dependency
<add> */
<add> public static boolean isAdaptiveBacklightRequired() { return false; }
<add>
<add> /**
<add> * Set this to true if the implementation is self-managed and does
<add> * it's own ambient sensing. In this case, setEnabled is assumed
<add> * to toggle the feature on or off, but not activate it. If set
<add> * to false, LiveDisplay will call setEnabled when the ambient lux
<add> * threshold is crossed.
<add> *
<add> * @return true if this enhancement is self-managed
<add> */
<add> public static boolean isSelfManaged() { return false; }
<add>
<add>} |
|
Java | bsd-3-clause | b2d083666c08f1380922f4708a62de79958340f6 | 0 | NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers | package gov.nih.nci.cabig.caaers.web.participant;
import gov.nih.nci.cabig.caaers.domain.*;
import gov.nih.nci.cabig.caaers.domain.repository.OrganizationRepository;
import gov.nih.nci.cabig.caaers.utils.ConfigProperty;
import gov.nih.nci.cabig.caaers.utils.Lov;
import gov.nih.nci.cabig.caaers.web.ListValues;
import gov.nih.nci.cabig.caaers.web.fields.*;
import gov.nih.nci.cabig.caaers.web.utils.WebUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.validation.Errors;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
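/**
 * Tab that captures the basic subject details: the enrolling site, the subject's
 * demographics, and the organization- and system-assigned identifiers. The field
 * groups built here are rendered by the shared {@link TabWithFields} infrastructure.
 */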
public class CreateParticipantTab<T extends ParticipantInputCommand> extends TabWithFields<T> {
public CreateParticipantTab() {
super("Enter Subject Information", "Details", "par/par_create_participant");
}
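    // Collaborators; presumably injected via the Spring context (setters are expected elsewhere in this class).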
OrganizationRepository organizationRepository;
private ListValues listValues;
private ConfigProperty configurationProperty;
private static final String PARTICIPANT_FIELD_GROUP = "participant";
private static final String SITE_FIELD_GROUP = "site";
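    /**
     * Builds the field groups shown on this tab: the site selector, the subject
     * demographic fields, and one repeating group each for organization-assigned
     * and system-assigned identifiers.
     */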
public Map<String, InputFieldGroup> createFieldGroups(ParticipantInputCommand command) {
InputFieldGroup participantFieldGroup;
InputFieldGroup siteFieldGroup;
RepeatingFieldGroupFactory repeatingFieldGroupFactoryOrg;
RepeatingFieldGroupFactory repeatingFieldGroupFactorySys;
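        // Site selector: only organizations that have study sites are offered.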
siteFieldGroup = new DefaultInputFieldGroup(SITE_FIELD_GROUP);
Map<Object, Object> options = new LinkedHashMap<Object, Object>();
options.put("", "Please select");
List<Organization> organizations = organizationRepository.getOrganizationsHavingStudySites();
if (organizations != null) {
options.putAll(WebUtils.collectOptions(organizations, "id", "fullName"));
}
siteFieldGroup.getFields().add(InputFieldFactory.createSelectField("organization", "Site", true, options));
participantFieldGroup = new DefaultInputFieldGroup(PARTICIPANT_FIELD_GROUP);
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.firstName", "First Name", true));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.lastName", "Last Name", true));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.maidenName", "Maiden Name", false));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.middleName", "Middle Name", false));
InputField dobField = InputFieldFactory.createSplitDateField("participant.dateOfBirth", "Date of birth", false, true, true, true);
// CompositeField dobField = new CompositeField("participant.dateOfBirth", new DefaultInputFieldGroup(null, "Date of birth").
// addField(dobYear).addField(dobMonth).addField(dobDay));
// dobField.setRequired(true);
// dobField.getAttributes().put(InputField.HELP, "par.par_create_participant.participant.dateOfBirth");
participantFieldGroup.getFields().add(dobField);
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.gender", "Gender", true, collectOptions(listValues.getParticipantGender())));
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.ethnicity", "Ethnicity", true, collectOptions(listValues.getParticipantEthnicity())));
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.race", "Race", true, collectOptions(listValues.getParticipantRace())));
repeatingFieldGroupFactoryOrg = new RepeatingFieldGroupFactory("mainOrg", "participant.organizationIdentifiers");
repeatingFieldGroupFactorySys = new RepeatingFieldGroupFactory("mainSys", "participant.systemAssignedIdentifiers");
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createTextField("value", "Identifier", true));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createTextField("value", "Identifier", true));
options = new LinkedHashMap<Object, Object>();
List<Lov> list = configurationProperty.getMap().get("participantIdentifiersType");
options.put("", "Please select");
options.putAll(WebUtils.collectOptions(list, "code", "desc"));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createAutocompleterField("organization", "Organization Identifier", true));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createTextField("systemName", "System Name", true));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createCheckboxField("primaryIndicator", "Primary Indicator"));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createCheckboxField("primaryIndicator", "Primary Indicator"));
InputFieldGroupMap map = new InputFieldGroupMap();
if (command.getParticipant() != null) {
map.addRepeatingFieldGroupFactory(repeatingFieldGroupFactoryOrg, command.getParticipant().getOrganizationIdentifiers().size());
map.addRepeatingFieldGroupFactory(repeatingFieldGroupFactorySys, command.getParticipant().getSystemAssignedIdentifiers().size());
}
map.addInputFieldGroup(participantFieldGroup);
map.addInputFieldGroup(siteFieldGroup);
return map;
}
protected Map<Object, Object> collectOptions(final List<ListValues> list) {
Map<Object, Object> options = new LinkedHashMap<Object, Object>();
options.putAll(WebUtils.collectOptions(list, "code", "desc"));
return options;
}
@Override
public Map<String, Object> referenceData(HttpServletRequest request, T command) {
Map<String, Object> refdata = super.referenceData(request, command);
refdata.put("action", "New");
return refdata;
}
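// Validation: the date of birth must be a real date and exactly one identifier
// must be flagged as primary.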
@Override
protected void validate(T command, BeanWrapper commandBean, Map<String, InputFieldGroup> fieldGroups, Errors errors) {
boolean hasPrimaryID = false;
DateValue dob = command.getParticipant().getDateOfBirth();
if (dob.checkIfDateIsInValid()) {
errors.rejectValue("participant.dateOfBirth", "REQUIRED", "Incorrect Date Of Birth");
}
for (Identifier identifier : command.getParticipant().getIdentifiersLazy()) {
if (identifier.isPrimary()) {
if (hasPrimaryID) {
// there are at least 2 Primary IDs selected
hasPrimaryID = false;
break;
}
hasPrimaryID = true;
}
}
if (!hasPrimaryID)
errors.rejectValue("participant.identifiers", "REQUIRED", "Please Include exactly One Primary Identifier");
}
public OrganizationRepository getOrganizationRepository() {
return organizationRepository;
}
public void setOrganizationRepository(OrganizationRepository organizationRepository) {
this.organizationRepository = organizationRepository;
}
public void setListValues(final ListValues listValues) {
this.listValues = listValues;
}
public ConfigProperty getConfigurationProperty() {
return configurationProperty;
}
public void setConfigurationProperty(ConfigProperty configurationProperty) {
this.configurationProperty = configurationProperty;
}
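// AJAX handlers: each one adds or removes an identifier row on the command's
// participant and returns the affected row indexes for the view to re-render.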
public ModelAndView addOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<OrganizationAssignedIdentifier> list = command.getParticipant().getOrganizationIdentifiers();
// store the new index for the new Identifier
int size = list.size();
Integer[] indexes = new Integer[]{size};
modelAndView.getModel().put("indexes", indexes);
// store the new Identifier object into the command.participant
OrganizationAssignedIdentifier newIdentifier = new OrganizationAssignedIdentifier();
command.getParticipant().addIdentifier(newIdentifier);
return modelAndView;
}
public ModelAndView removeOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<OrganizationAssignedIdentifier> list = command.getParticipant().getOrganizationIdentifiers();
list.remove(list.get(command.getIndex()));
// update the array of remaining indexes after deleting
int size = list.size();
Integer[] indexes = new Integer[size];
for (int i = 0; i < size; i++) {
indexes[i] = i;
}
modelAndView.getModel().put("indexes", indexes);
return modelAndView;
}
public ModelAndView addSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<SystemAssignedIdentifier> list = command.getParticipant().getSystemAssignedIdentifiers();
// store the new index for the new Identifier
int size = list.size();
Integer[] indexes = new Integer[]{size};
modelAndView.getModel().put("indexes", indexes);
// store the new Identifier object into the command.participant
SystemAssignedIdentifier newIdentifier = new SystemAssignedIdentifier();
command.getParticipant().addIdentifier(newIdentifier);
return modelAndView;
}
public ModelAndView removeSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<SystemAssignedIdentifier> list = command.getParticipant().getSystemAssignedIdentifiers();
list.remove(list.get(command.getIndex()));
// update the array of remaining indexes after deleting
int size = list.size();
Integer[] indexes = new Integer[size];
for (int i = 0; i < size; i++) {
indexes[i] = i;
}
modelAndView.getModel().put("indexes", indexes);
return modelAndView;
}
}
| projects/web/src/main/java/gov/nih/nci/cabig/caaers/web/participant/CreateParticipantTab.java | package gov.nih.nci.cabig.caaers.web.participant;
import gov.nih.nci.cabig.caaers.domain.*;
import gov.nih.nci.cabig.caaers.domain.repository.OrganizationRepository;
import gov.nih.nci.cabig.caaers.utils.ConfigProperty;
import gov.nih.nci.cabig.caaers.utils.Lov;
import gov.nih.nci.cabig.caaers.web.ListValues;
import gov.nih.nci.cabig.caaers.web.fields.*;
import gov.nih.nci.cabig.caaers.web.utils.WebUtils;
import org.springframework.beans.BeanWrapper;
import org.springframework.validation.Errors;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
public class CreateParticipantTab<T extends ParticipantInputCommand> extends TabWithFields<T> {
public CreateParticipantTab() {
super("Enter Subject Information", "Details", "par/par_create_participant");
}
OrganizationRepository organizationRepository;
private ListValues listValues;
private ConfigProperty configurationProperty;
private static final String PARTICIPANT_FIELD_GROUP = "participant";
private static final String SITE_FIELD_GROUP = "site";
public Map<String, InputFieldGroup> createFieldGroups(ParticipantInputCommand command) {
InputFieldGroup participantFieldGroup;
InputFieldGroup siteFieldGroup;
RepeatingFieldGroupFactory repeatingFieldGroupFactoryOrg;
RepeatingFieldGroupFactory repeatingFieldGroupFactorySys;
siteFieldGroup = new DefaultInputFieldGroup(SITE_FIELD_GROUP);
Map<Object, Object> options = new LinkedHashMap<Object, Object>();
options.put("", "Please select");
List<Organization> organizations = organizationRepository.getOrganizationsHavingStudySites();
if (organizations != null) {
options.putAll(WebUtils.collectOptions(organizations, "id", "fullName"));
}
siteFieldGroup.getFields().add(InputFieldFactory.createSelectField("organization", "Site", true, options));
participantFieldGroup = new DefaultInputFieldGroup(PARTICIPANT_FIELD_GROUP);
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.firstName", "First Name", true));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.lastName", "Last Name", true));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.maidenName", "Maiden Name", false));
participantFieldGroup.getFields().add(InputFieldFactory.createTextField("participant.middleName", "Middle Name", false));
InputField dobField = InputFieldFactory.createSplitDateField("participant.dateOfBirth", "Date of birth", false, true, true, true);
// CompositeField dobField = new CompositeField("participant.dateOfBirth", new DefaultInputFieldGroup(null, "Date of birth").
// addField(dobYear).addField(dobMonth).addField(dobDay));
// dobField.setRequired(true);
// dobField.getAttributes().put(InputField.HELP, "par.par_create_participant.participant.dateOfBirth");
participantFieldGroup.getFields().add(dobField);
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.gender", "Gender", true, collectOptions(listValues.getParticipantGender())));
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.ethnicity", "Ethnicity", true, collectOptions(listValues.getParticipantEthnicity())));
participantFieldGroup.getFields().add(InputFieldFactory.createSelectField("participant.race", "Race", true, collectOptions(listValues.getParticipantRace())));
repeatingFieldGroupFactoryOrg = new RepeatingFieldGroupFactory("mainOrg", "participant.organizationIdentifiers");
repeatingFieldGroupFactorySys = new RepeatingFieldGroupFactory("mainSys", "participant.systemAssignedIdentifiers");
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createTextField("value", "Identifier", true));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createTextField("value", "Identifier", true));
options = new LinkedHashMap<Object, Object>();
List<Lov> list = configurationProperty.getMap().get("participantIdentifiersType");
options.put("", "Please select");
options.putAll(WebUtils.collectOptions(list, "code", "desc"));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createAutocompleterField("organization", "Organization Identifier", true));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createTextField("systemName", "System Name", true));
repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createCheckboxField("primaryIndicator", "Primary Indicator"));
repeatingFieldGroupFactorySys.addField(InputFieldFactory.createCheckboxField("primaryIndicator", "Primary Indicator"));
InputFieldGroupMap map = new InputFieldGroupMap();
if (command.getParticipant() != null) {
map.addRepeatingFieldGroupFactory(repeatingFieldGroupFactoryOrg, command.getParticipant().getOrganizationIdentifiers().size());
map.addRepeatingFieldGroupFactory(repeatingFieldGroupFactorySys, command.getParticipant().getSystemAssignedIdentifiers().size());
}
map.addInputFieldGroup(participantFieldGroup);
map.addInputFieldGroup(siteFieldGroup);
return map;
}
protected Map<Object, Object> collectOptions(final List<ListValues> list) {
Map<Object, Object> options = new LinkedHashMap<Object, Object>();
options.putAll(WebUtils.collectOptions(list, "code", "desc"));
return options;
}
@Override
public Map<String, Object> referenceData(HttpServletRequest request, T command) {
Map<String, Object> refdata = super.referenceData(request, command);
refdata.put("action", "New");
return refdata;
}
@Override
protected void validate(T command, BeanWrapper commandBean, Map<String, InputFieldGroup> fieldGroups, Errors errors) {
boolean hasPrimaryID = false;
DateValue dob = command.getParticipant().getDateOfBirth();
if (dob.checkIfDateIsInValid()) {
errors.rejectValue("participant.dateOfBirth", "REQUIRED", "Incorrect Date Of Birth");
}
for (Identifier identifier : command.getParticipant().getIdentifiersLazy()) {
if (identifier.isPrimary()) {
if (hasPrimaryID) {
// there are at least 2 Primary IDs selected
hasPrimaryID = false;
break;
}
hasPrimaryID = true;
}
}
if (!hasPrimaryID)
errors.rejectValue("participant.identifiers", "REQUIRED", "Please Include exactly One Primary Identifier");
}
public OrganizationRepository getOrganizationRepository() {
return organizationRepository;
}
public void setOrganizationRepository(OrganizationRepository organizationRepository) {
this.organizationRepository = organizationRepository;
}
public void setListValues(final ListValues listValues) {
this.listValues = listValues;
}
public ConfigProperty getConfigurationProperty() {
return configurationProperty;
}
public void setConfigurationProperty(ConfigProperty configurationProperty) {
this.configurationProperty = configurationProperty;
}
public ModelAndView addOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
System.out.println("addOrganizationIdentifier");
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<OrganizationAssignedIdentifier> list = command.getParticipant().getOrganizationIdentifiers();
// store the new index for the new Identifier
int size = list.size();
Integer[] indexes = new Integer[]{size};
modelAndView.getModel().put("indexes", indexes);
// store the new Identifier object into the command.participant
OrganizationAssignedIdentifier newIdentifier = new OrganizationAssignedIdentifier();
command.getParticipant().addIdentifier(newIdentifier);
System.out.println("org size after add: " + command.getParticipant().getOrganizationIdentifiers().size());
return modelAndView;
}
public ModelAndView removeOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
System.out.println("removeOrganizationIdentifier");
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<OrganizationAssignedIdentifier> list = command.getParticipant().getOrganizationIdentifiers();
list.remove(list.get(command.getIndex()));
// update the array of remaining indexes after deleting
int size = list.size();
Integer[] indexes = new Integer[size];
for (int i = 0; i < size; i++) {
indexes[i] = i;
}
System.out.println("org size after delete: " + command.getParticipant().getOrganizationIdentifiers().size());
modelAndView.getModel().put("indexes", indexes);
return modelAndView;
}
public ModelAndView addSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
System.out.println("addSystemIdentifier");
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<SystemAssignedIdentifier> list = command.getParticipant().getSystemAssignedIdentifiers();
// store the new index for the new Identifier
int size = list.size();
Integer[] indexes = new Integer[]{size};
modelAndView.getModel().put("indexes", indexes);
// store the new Identifier object into the command.participant
SystemAssignedIdentifier newIdentifier = new SystemAssignedIdentifier();
command.getParticipant().addIdentifier(newIdentifier);
System.out.println("sys size after add: " + command.getParticipant().getSystemAssignedIdentifiers().size());
return modelAndView;
}
public ModelAndView removeSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
System.out.println("removeSystemIdentifier");
Map<String, Boolean> map = new HashMap<String, Boolean>();
ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
ParticipantInputCommand command = (ParticipantInputCommand) cmd;
List<SystemAssignedIdentifier> list = command.getParticipant().getSystemAssignedIdentifiers();
System.out.println("before delete: " + list.size());
list.remove(list.get(command.getIndex()));
System.out.println("after delete:" + list.size());
// update the array of remaining indexes after deleting
int size = list.size();
Integer[] indexes = new Integer[size];
for (int i = 0; i < size; i++) {
indexes[i] = i;
}
System.out.println("sys size after delete: " + command.getParticipant().getSystemAssignedIdentifiers().size());
modelAndView.getModel().put("indexes", indexes);
return modelAndView;
}
}
|
SVN-Revision: 6827
| projects/web/src/main/java/gov/nih/nci/cabig/caaers/web/participant/CreateParticipantTab.java | <ide><path>rojects/web/src/main/java/gov/nih/nci/cabig/caaers/web/participant/CreateParticipantTab.java
<ide> repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
<ide> repeatingFieldGroupFactorySys.addField(InputFieldFactory.createSelectField("type", "Identifier Type", true, options));
<ide>
<del>
<ide> repeatingFieldGroupFactoryOrg.addField(InputFieldFactory.createAutocompleterField("organization", "Organization Identifier", true));
<ide> repeatingFieldGroupFactorySys.addField(InputFieldFactory.createTextField("systemName", "System Name", true));
<ide>
<ide> }
<ide>
<ide> public ModelAndView addOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
<del> System.out.println("addOrganizationIdentifier");
<ide> Map<String, Boolean> map = new HashMap<String, Boolean>();
<ide> ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
<ide>
<ide> OrganizationAssignedIdentifier newIdentifier = new OrganizationAssignedIdentifier();
<ide> command.getParticipant().addIdentifier(newIdentifier);
<ide>
<del> System.out.println("org size after add: " + command.getParticipant().getOrganizationIdentifiers().size());
<del>
<ide> return modelAndView;
<ide> }
<ide>
<ide> public ModelAndView removeOrganizationIdentifier(HttpServletRequest request, Object cmd, Errors error) {
<del> System.out.println("removeOrganizationIdentifier");
<ide> Map<String, Boolean> map = new HashMap<String, Boolean>();
<ide> ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
<ide>
<ide> for (int i = 0; i < size; i++) {
<ide> indexes[i] = i;
<ide> }
<del> System.out.println("org size after delete: " + command.getParticipant().getOrganizationIdentifiers().size());
<ide>
<ide> modelAndView.getModel().put("indexes", indexes);
<ide> return modelAndView;
<ide> }
<ide>
<ide> public ModelAndView addSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
<del> System.out.println("addSystemIdentifier");
<ide> Map<String, Boolean> map = new HashMap<String, Boolean>();
<ide> ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
<ide>
<ide> SystemAssignedIdentifier newIdentifier = new SystemAssignedIdentifier();
<ide> command.getParticipant().addIdentifier(newIdentifier);
<ide>
<del> System.out.println("sys size after add: " + command.getParticipant().getSystemAssignedIdentifiers().size());
<del>
<ide> return modelAndView;
<ide> }
<ide>
<ide> public ModelAndView removeSystemIdentifier(HttpServletRequest request, Object cmd, Errors error) {
<del> System.out.println("removeSystemIdentifier");
<ide> Map<String, Boolean> map = new HashMap<String, Boolean>();
<ide> ModelAndView modelAndView = new ModelAndView(getAjaxViewName(request), map);
<ide>
<ide> ParticipantInputCommand command = (ParticipantInputCommand) cmd;
<ide> List<SystemAssignedIdentifier> list = command.getParticipant().getSystemAssignedIdentifiers();
<ide>
<del> System.out.println("before delete: " + list.size());
<ide> list.remove(list.get(command.getIndex()));
<del> System.out.println("after delete:" + list.size());
<ide>
<ide> // update the array of remainning indexes after deleting
<ide> // update the array of remaining indexes after deleting
<ide> for (int i = 0; i < size; i++) {
<ide> indexes[i] = i;
<ide> }
<del> System.out.println("sys size after delete: " + command.getParticipant().getSystemAssignedIdentifiers().size());
<ide>
<ide> modelAndView.getModel().put("indexes", indexes);
<ide> return modelAndView; |
||
Java | apache-2.0 | a8671b2a7b0c0fa6e5783fe082eea6f5a92a47df | 0 | HanSolo/Medusa | /*
* Copyright (c) 2016 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.medusa.skins;
import eu.hansolo.medusa.Fonts;
import eu.hansolo.medusa.Gauge;
import eu.hansolo.medusa.Section;
import eu.hansolo.medusa.tools.Helper;
import javafx.geometry.Insets;
import javafx.scene.control.Skin;
import javafx.scene.control.SkinBase;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.Border;
import javafx.scene.layout.BorderStroke;
import javafx.scene.layout.BorderStrokeStyle;
import javafx.scene.layout.BorderWidths;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.scene.text.Text;
import java.util.List;
import java.util.Locale;
/**
* Created by hansolo on 30.11.16.
*/
public class TileTextKpiSkin extends SkinBase<Gauge> implements Skin<Gauge> {
private static final double PREFERRED_WIDTH = 250;
private static final double PREFERRED_HEIGHT = 250;
private static final double MINIMUM_WIDTH = 50;
private static final double MINIMUM_HEIGHT = 50;
private static final double MAXIMUM_WIDTH = 1024;
private static final double MAXIMUM_HEIGHT = 1024;
private double size;
private Region barBackground;
private Rectangle barClip;
private Rectangle bar;
private Text titleText;
private Text valueText;
private Text unitText;
private Text percentageText;
private Text percentageUnitText;
private Rectangle maxValueRect;
private Text maxValueText;
private Pane pane;
private double minValue;
private double range;
private double stepSize;
private String formatString;
private Locale locale;
private List<Section> sections;
private boolean sectionsVisible;
private Color barColor;
// ******************** Constructors **************************************
public TileTextKpiSkin(Gauge gauge) {
super(gauge);
if (gauge.isAutoScale()) gauge.calcAutoScale();
minValue = gauge.getMinValue();
range = gauge.getRange();
stepSize = PREFERRED_WIDTH / range;
formatString = new StringBuilder("%.").append(Integer.toString(gauge.getDecimals())).append("f").toString();
locale = gauge.getLocale();
sections = gauge.getSections();
sectionsVisible = gauge.getSectionsVisible();
barColor = gauge.getBarColor();
initGraphics();
registerListeners();
setBar(gauge.getCurrentValue());
}
// ******************** Initialization ************************************
private void initGraphics() {
// Set initial size
if (Double.compare(getSkinnable().getPrefWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getPrefHeight(), 0.0) <= 0 ||
Double.compare(getSkinnable().getWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getHeight(), 0.0) <= 0) {
if (getSkinnable().getPrefWidth() > 0 && getSkinnable().getPrefHeight() > 0) {
getSkinnable().setPrefSize(getSkinnable().getPrefWidth(), getSkinnable().getPrefHeight());
} else {
getSkinnable().setPrefSize(PREFERRED_WIDTH, PREFERRED_HEIGHT);
}
}
barBackground = new Region();
barBackground.setBackground(new Background(new BackgroundFill(getSkinnable().getBarBackgroundColor(), new CornerRadii(0.0, 0.0, 0.025, 0.025, true), Insets.EMPTY)));
barClip = new Rectangle();
bar = new Rectangle();
bar.setFill(getSkinnable().getBarColor());
bar.setStroke(null);
bar.setClip(barClip);
titleText = new Text();
titleText.setFill(getSkinnable().getTitleColor());
Helper.enableNode(titleText, !getSkinnable().getTitle().isEmpty());
valueText = new Text();
valueText.setFill(getSkinnable().getValueColor());
Helper.enableNode(valueText, getSkinnable().isValueVisible());
unitText = new Text(getSkinnable().getUnit());
unitText.setFill(getSkinnable().getUnitColor());
Helper.enableNode(unitText, !getSkinnable().getUnit().isEmpty());
percentageText = new Text();
percentageText.setFill(getSkinnable().getBarColor());
percentageUnitText = new Text("%");
percentageUnitText.setFill(getSkinnable().getBarColor());
maxValueRect = new Rectangle();
maxValueRect.setFill(getSkinnable().getThresholdColor());
maxValueText = new Text();
maxValueText.setFill(getSkinnable().getBackgroundPaint());
pane = new Pane(barBackground, bar, titleText, valueText, unitText, percentageText, percentageUnitText, maxValueRect, maxValueText);
pane.setBorder(new Border(new BorderStroke(getSkinnable().getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(getSkinnable().getBorderWidth()))));
pane.setBackground(new Background(new BackgroundFill(getSkinnable().getBackgroundPaint(), CornerRadii.EMPTY, Insets.EMPTY)));
getChildren().setAll(pane);
}
private void registerListeners() {
getSkinnable().widthProperty().addListener(o -> handleEvents("RESIZE"));
getSkinnable().heightProperty().addListener(o -> handleEvents("RESIZE"));
getSkinnable().setOnUpdate(e -> handleEvents(e.eventType.name()));
getSkinnable().currentValueProperty().addListener(o -> setBar(getSkinnable().getCurrentValue()));
}
// ******************** Methods *******************************************
@Override protected double computeMinWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MINIMUM_WIDTH; }
@Override protected double computeMinHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MINIMUM_HEIGHT; }
@Override protected double computePrefWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return super.computePrefWidth(HEIGHT, TOP, RIGHT, BOTTOM, LEFT); }
@Override protected double computePrefHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return super.computePrefHeight(WIDTH, TOP, RIGHT, BOTTOM, LEFT); }
@Override protected double computeMaxWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MAXIMUM_WIDTH; }
@Override protected double computeMaxHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MAXIMUM_HEIGHT; }
private void handleEvents(final String EVENT_TYPE) {
if ("RESIZE".equals(EVENT_TYPE)) {
resize();
redraw();
} else if ("REDRAW".equals(EVENT_TYPE)) {
redraw();
} else if ("RECALC".equals(EVENT_TYPE)) {
minValue = getSkinnable().getMinValue();
range = getSkinnable().getRange();
stepSize = size / range;
redraw();
} else if ("VISIBLITY".equals(EVENT_TYPE)) {
Helper.enableNode(titleText, !getSkinnable().getTitle().isEmpty());
Helper.enableNode(valueText, getSkinnable().isValueVisible());
Helper.enableNode(unitText, !getSkinnable().getUnit().isEmpty());
} else if ("SECTION".equals(EVENT_TYPE)) {
sections = getSkinnable().getSections();
}
}
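// Maps the current value to the bar width and the percentage readout, both measured
// from minValue across the gauge range, and recolors the bar when sections are visible.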
private void setBar(final double VALUE) {
double targetValue = (VALUE - minValue) * stepSize;
bar.setWidth(targetValue);
valueText.setText(String.format(locale, formatString, VALUE));
percentageText.setText(String.format(locale, formatString, ((VALUE - minValue) / range * 100)));
resizeDynamicText();
if (sectionsVisible && !sections.isEmpty()) { setBarColor(VALUE); }
}
private void setBarColor(final double VALUE) {
Color color = barColor;
for(Section section : sections) {
if (section.contains(VALUE)) {
color = section.getColor();
break;
}
}
bar.setFill(color);
percentageText.setFill(color);
percentageUnitText.setFill(color);
}
// ******************** Resizing ******************************************
private void resizeDynamicText() {
double maxWidth = 0.9 * size;
double fontSize = 0.24 * size;
valueText.setFont(Fonts.latoRegular(fontSize));
if (valueText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(valueText, maxWidth, fontSize); }
valueText.relocate(size * 0.925 - valueText.getLayoutBounds().getWidth() - unitText.getLayoutBounds().getWidth(), size * 0.18);
percentageUnitText.relocate(percentageText.getLayoutBounds().getMaxX() + size * 0.075, size * 0.765);
}
private void resizeStaticText() {
double maxWidth = 0.98 * size;
double fontSize = size * 0.06;
titleText.setFont(Fonts.latoRegular(fontSize));
if (titleText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(titleText, maxWidth, fontSize); }
titleText.relocate(size * 0.05, size * 0.05);
unitText.setFont(Fonts.latoRegular(fontSize));
if (unitText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(unitText, maxWidth, fontSize); }
unitText.relocate(size * 0.95 - unitText.getLayoutBounds().getWidth(), size * 0.3575);
maxWidth = size * 0.45;
fontSize = size * 0.18;
percentageText.setFont(Fonts.latoRegular(fontSize));
if (percentageText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(percentageText, maxWidth, fontSize); }
percentageText.relocate(size * 0.05, size * 0.705);
maxWidth = size * 0.1;
fontSize = size * 0.12;
percentageUnitText.setFont(Fonts.latoRegular(fontSize));
if (percentageUnitText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(percentageUnitText, maxWidth, fontSize); }
percentageUnitText.relocate(percentageText.getLayoutBounds().getMaxX() + size * 0.075, size * 0.765);
maxWidth = size * 0.45;
fontSize = size * 0.09;
maxValueText.setFont(Fonts.latoRegular(fontSize));
if (maxValueText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(maxValueText, maxWidth, fontSize); }
maxValueText.setX((size * 0.925) - maxValueText.getLayoutBounds().getWidth());
maxValueText.setY(size * 0.865);
}
private void resize() {
double width = getSkinnable().getWidth() - getSkinnable().getInsets().getLeft() - getSkinnable().getInsets().getRight();
double height = getSkinnable().getHeight() - getSkinnable().getInsets().getTop() - getSkinnable().getInsets().getBottom();
size = width < height ? width : height;
stepSize = size / range;
if (width > 0 && height > 0) {
pane.setMaxSize(size, size);
pane.relocate((width - size) * 0.5, (height - size) * 0.5);
barBackground.setPrefSize(size, size * 0.035);
barBackground.relocate(0, size * 0.965);
barClip.setX(0);
barClip.setY(size * 0.95);
barClip.setWidth(size);
barClip.setHeight(size * 0.05);
barClip.setArcWidth(size * 0.025);
barClip.setArcHeight(size * 0.025);
bar.setX(0);
bar.setY(size * 0.965);
bar.setWidth(getSkinnable().getValue() * stepSize);
bar.setHeight(size * 0.035);
resizeStaticText();
resizeDynamicText();
maxValueRect.setWidth(maxValueText.getLayoutBounds().getWidth() + size * 0.05);
maxValueRect.setHeight(maxValueText.getLayoutBounds().getHeight());
maxValueRect.setX((size * 0.95) - maxValueRect.getWidth());
maxValueRect.setY(size * 0.7775);
maxValueRect.setArcWidth(size * 0.025);
maxValueRect.setArcHeight(size * 0.025);
}
}
private void redraw() {
pane.setBorder(new Border(new BorderStroke(getSkinnable().getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(getSkinnable().getBorderWidth() / PREFERRED_WIDTH * size))));
pane.setBackground(new Background(new BackgroundFill(getSkinnable().getBackgroundPaint(), new CornerRadii(size * 0.025), Insets.EMPTY)));
locale = getSkinnable().getLocale();
formatString = new StringBuilder("%.").append(Integer.toString(getSkinnable().getDecimals())).append("f").toString();
titleText.setText(getSkinnable().getTitle());
percentageText.setText(String.format(locale, "%." + getSkinnable().getTickLabelDecimals() + "f", getSkinnable().getValue() / range * 100));
maxValueText.setText(String.format(locale, "%." + getSkinnable().getTickLabelDecimals() + "f", getSkinnable().getMaxValue()));
resizeStaticText();
barBackground.setBackground(new Background(new BackgroundFill(getSkinnable().getBarBackgroundColor().brighter().brighter(), new CornerRadii(0.0, 0.0, size * 0.025, size * 0.025, false), Insets.EMPTY)));
barColor = getSkinnable().getBarColor();
if (sectionsVisible && !sections.isEmpty()) {
setBarColor(getSkinnable().getValue());
} else {
bar.setFill(barColor);
}
titleText.setFill(getSkinnable().getTitleColor());
maxValueText.setFill(getSkinnable().getBackgroundPaint());
valueText.setFill(getSkinnable().getValueColor());
unitText.setFill(getSkinnable().getUnitColor());
}
}
| src/main/java/eu/hansolo/medusa/skins/TileTextKpiSkin.java | /*
* Copyright (c) 2016 by Gerrit Grunwald
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.medusa.skins;
import eu.hansolo.medusa.Fonts;
import eu.hansolo.medusa.Gauge;
import eu.hansolo.medusa.Section;
import eu.hansolo.medusa.tools.Helper;
import javafx.geometry.Insets;
import javafx.scene.control.Skin;
import javafx.scene.control.SkinBase;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.Border;
import javafx.scene.layout.BorderStroke;
import javafx.scene.layout.BorderStrokeStyle;
import javafx.scene.layout.BorderWidths;
import javafx.scene.layout.CornerRadii;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.paint.Color;
import javafx.scene.shape.Rectangle;
import javafx.scene.text.Text;
import javafx.scene.text.TextAlignment;
import javafx.scene.text.TextFlow;
import java.util.List;
import java.util.Locale;
/**
* Created by hansolo on 30.11.16.
*/
public class TileTextKpiSkin extends SkinBase<Gauge> implements Skin<Gauge> {
private static final double PREFERRED_WIDTH = 250;
private static final double PREFERRED_HEIGHT = 250;
private static final double MINIMUM_WIDTH = 50;
private static final double MINIMUM_HEIGHT = 50;
private static final double MAXIMUM_WIDTH = 1024;
private static final double MAXIMUM_HEIGHT = 1024;
private double size;
private Region barBackground;
private Rectangle barClip;
private Rectangle bar;
private Text titleText;
private Text valueText;
private Text unitText;
private TextFlow textContainer;
private Text percentageText;
private Text percentageUnitText;
private Rectangle maxValueRect;
private Text maxValueText;
private Pane pane;
private double range;
private double stepSize;
private String formatString;
private Locale locale;
private List<Section> sections;
private boolean sectionsVisible;
private Color barColor;
// ******************** Constructors **************************************
public TileTextKpiSkin(Gauge gauge) {
super(gauge);
if (gauge.isAutoScale()) gauge.calcAutoScale();
range = gauge.getRange();
stepSize = PREFERRED_WIDTH / range;
formatString = new StringBuilder("%.").append(Integer.toString(gauge.getDecimals())).append("f").toString();
locale = gauge.getLocale();
sections = gauge.getSections();
sectionsVisible = gauge.getSectionsVisible();
barColor = gauge.getBarColor();
initGraphics();
registerListeners();
setBar(gauge.getCurrentValue());
}
// ******************** Initialization ************************************
private void initGraphics() {
// Set initial size
if (Double.compare(getSkinnable().getPrefWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getPrefHeight(), 0.0) <= 0 ||
Double.compare(getSkinnable().getWidth(), 0.0) <= 0 || Double.compare(getSkinnable().getHeight(), 0.0) <= 0) {
if (getSkinnable().getPrefWidth() > 0 && getSkinnable().getPrefHeight() > 0) {
getSkinnable().setPrefSize(getSkinnable().getPrefWidth(), getSkinnable().getPrefHeight());
} else {
getSkinnable().setPrefSize(PREFERRED_WIDTH, PREFERRED_HEIGHT);
}
}
barBackground = new Region();
barBackground.setBackground(new Background(new BackgroundFill(getSkinnable().getBarBackgroundColor(), new CornerRadii(0.0, 0.0, 0.025, 0.025, true), Insets.EMPTY)));
barClip = new Rectangle();
bar = new Rectangle();
bar.setFill(getSkinnable().getBarColor());
bar.setStroke(null);
bar.setClip(barClip);
titleText = new Text();
titleText.setFill(getSkinnable().getTitleColor());
Helper.enableNode(titleText, !getSkinnable().getTitle().isEmpty());
valueText = new Text();
valueText.setFill(getSkinnable().getValueColor());
Helper.enableNode(valueText, getSkinnable().isValueVisible());
unitText = new Text(getSkinnable().getUnit());
unitText.setFill(getSkinnable().getUnitColor());
Helper.enableNode(unitText, !getSkinnable().getUnit().isEmpty());
textContainer = new TextFlow(valueText, unitText);
textContainer.setTextAlignment(TextAlignment.RIGHT);
textContainer.setPrefWidth(PREFERRED_WIDTH * 0.9);
percentageText = new Text();
percentageText.setFill(getSkinnable().getBarColor());
percentageUnitText = new Text("%");
percentageUnitText.setFill(getSkinnable().getBarColor());
maxValueRect = new Rectangle();
maxValueRect.setFill(getSkinnable().getThresholdColor());
maxValueText = new Text();
maxValueText.setFill(getSkinnable().getBackgroundPaint());
pane = new Pane(barBackground, bar, titleText, textContainer, percentageText, percentageUnitText, maxValueRect, maxValueText);
pane.setBorder(new Border(new BorderStroke(getSkinnable().getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(getSkinnable().getBorderWidth()))));
pane.setBackground(new Background(new BackgroundFill(getSkinnable().getBackgroundPaint(), CornerRadii.EMPTY, Insets.EMPTY)));
getChildren().setAll(pane);
}
private void registerListeners() {
getSkinnable().widthProperty().addListener(o -> handleEvents("RESIZE"));
getSkinnable().heightProperty().addListener(o -> handleEvents("RESIZE"));
getSkinnable().setOnUpdate(e -> handleEvents(e.eventType.name()));
getSkinnable().currentValueProperty().addListener(o -> setBar(getSkinnable().getCurrentValue()));
}
// ******************** Methods *******************************************
@Override protected double computeMinWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MINIMUM_WIDTH; }
@Override protected double computeMinHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MINIMUM_HEIGHT; }
@Override protected double computePrefWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return super.computePrefWidth(HEIGHT, TOP, RIGHT, BOTTOM, LEFT); }
@Override protected double computePrefHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return super.computePrefHeight(WIDTH, TOP, RIGHT, BOTTOM, LEFT); }
@Override protected double computeMaxWidth(final double HEIGHT, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MAXIMUM_WIDTH; }
@Override protected double computeMaxHeight(final double WIDTH, final double TOP, final double RIGHT, final double BOTTOM, final double LEFT) { return MAXIMUM_HEIGHT; }
private void handleEvents(final String EVENT_TYPE) {
if ("RESIZE".equals(EVENT_TYPE)) {
resize();
redraw();
} else if ("REDRAW".equals(EVENT_TYPE)) {
redraw();
} else if ("RECALC".equals(EVENT_TYPE)) {
range = getSkinnable().getRange();
stepSize = size / range;
redraw();
} else if ("VISIBLITY".equals(EVENT_TYPE)) {
Helper.enableNode(titleText, !getSkinnable().getTitle().isEmpty());
Helper.enableNode(valueText, getSkinnable().isValueVisible());
Helper.enableNode(unitText, !getSkinnable().getUnit().isEmpty());
} else if ("SECTION".equals(EVENT_TYPE)) {
sections = getSkinnable().getSections();
}
}
private void setBar(final double VALUE) {
double targetValue = VALUE * stepSize;
bar.setWidth(targetValue);
valueText.setText(String.format(locale, formatString, VALUE));
percentageText.setText(String.format(locale, formatString, (VALUE / range * 100)));
resizeDynamicText();
if (sectionsVisible && !sections.isEmpty()) { setBarColor(VALUE); }
}
private void setBarColor(final double VALUE) {
Color color = barColor;
for(Section section : sections) {
if (section.contains(VALUE)) {
color = section.getColor();
break;
}
}
bar.setFill(color);
percentageText.setFill(color);
percentageUnitText.setFill(color);
}
// ******************** Resizing ******************************************
private void resizeDynamicText() {
double maxWidth = 0.9 * size;
double fontSize = 0.24 * size;
valueText.setFont(Fonts.latoRegular(fontSize));
if (valueText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(valueText, maxWidth, fontSize); }
percentageUnitText.relocate(percentageText.getLayoutBounds().getMaxX() + size * 0.075, size * 0.765);
}
private void resizeStaticText() {
double maxWidth = 0.98 * size;
double fontSize = size * 0.06;
titleText.setFont(Fonts.latoRegular(fontSize));
if (titleText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(titleText, maxWidth, fontSize); }
titleText.relocate(size * 0.05, size * 0.05);
unitText.setFont(Fonts.latoRegular(fontSize));
if (unitText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(unitText, maxWidth, fontSize); }
maxWidth = size * 0.45;
fontSize = size * 0.18;
percentageText.setFont(Fonts.latoRegular(fontSize));
if (percentageText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(percentageText, maxWidth, fontSize); }
percentageText.relocate(size * 0.05, size * 0.705);
maxWidth = size * 0.1;
fontSize = size * 0.12;
percentageUnitText.setFont(Fonts.latoRegular(fontSize));
if (percentageUnitText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(percentageUnitText, maxWidth, fontSize); }
percentageUnitText.relocate(percentageText.getLayoutBounds().getMaxX() + size * 0.075, size * 0.765);
maxWidth = size * 0.45;
fontSize = size * 0.09;
maxValueText.setFont(Fonts.latoRegular(fontSize));
if (maxValueText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(maxValueText, maxWidth, fontSize); }
maxValueText.setX((size * 0.925) - maxValueText.getLayoutBounds().getWidth());
maxValueText.setY(size * 0.865);
}
private void resize() {
double width = getSkinnable().getWidth() - getSkinnable().getInsets().getLeft() - getSkinnable().getInsets().getRight();
double height = getSkinnable().getHeight() - getSkinnable().getInsets().getTop() - getSkinnable().getInsets().getBottom();
size = width < height ? width : height;
stepSize = size / range;
if (width > 0 && height > 0) {
pane.setMaxSize(size, size);
pane.relocate((width - size) * 0.5, (height - size) * 0.5);
barBackground.setPrefSize(size, size * 0.035);
barBackground.relocate(0, size * 0.965);
barClip.setX(0);
barClip.setY(size * 0.95);
barClip.setWidth(size);
barClip.setHeight(size * 0.05);
barClip.setArcWidth(size * 0.025);
barClip.setArcHeight(size * 0.025);
bar.setX(0);
bar.setY(size * 0.965);
bar.setWidth(getSkinnable().getValue() * stepSize);
bar.setHeight(size * 0.035);
textContainer.setPrefWidth(size * 0.9);
textContainer.relocate(size * 0.05, size * 0.18);
resizeStaticText();
resizeDynamicText();
maxValueRect.setWidth(maxValueText.getLayoutBounds().getWidth() + size * 0.05);
maxValueRect.setHeight(maxValueText.getLayoutBounds().getHeight());
maxValueRect.setX((size * 0.95) - maxValueRect.getWidth());
maxValueRect.setY(size * 0.7775);
maxValueRect.setArcWidth(size * 0.025);
maxValueRect.setArcHeight(size * 0.025);
}
}
private void redraw() {
pane.setBorder(new Border(new BorderStroke(getSkinnable().getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(getSkinnable().getBorderWidth() / PREFERRED_WIDTH * size))));
pane.setBackground(new Background(new BackgroundFill(getSkinnable().getBackgroundPaint(), new CornerRadii(size * 0.025), Insets.EMPTY)));
locale = getSkinnable().getLocale();
formatString = new StringBuilder("%.").append(Integer.toString(getSkinnable().getDecimals())).append("f").toString();
titleText.setText(getSkinnable().getTitle());
percentageText.setText(String.format(locale, "%." + getSkinnable().getTickLabelDecimals() + "f", getSkinnable().getValue() / range * 100));
maxValueText.setText(String.format(locale, "%." + getSkinnable().getTickLabelDecimals() + "f", getSkinnable().getMaxValue()));
resizeStaticText();
barBackground.setBackground(new Background(new BackgroundFill(getSkinnable().getBarBackgroundColor().brighter().brighter(), new CornerRadii(0.0, 0.0, size * 0.025, size * 0.025, false), Insets.EMPTY)));
barColor = getSkinnable().getBarColor();
if (sectionsVisible && !sections.isEmpty()) {
setBarColor(getSkinnable().getValue());
} else {
bar.setFill(barColor);
}
titleText.setFill(getSkinnable().getTitleColor());
maxValueText.setFill(getSkinnable().getBackgroundPaint());
valueText.setFill(getSkinnable().getValueColor());
unitText.setFill(getSkinnable().getUnitColor());
}
}
| Moved back from TextFlow to manual placement and fixed problem with negative sections and values
| src/main/java/eu/hansolo/medusa/skins/TileTextKpiSkin.java | Moved back from TextFlow to manual placement and fixed problem with negative sections and values | <ide><path>rc/main/java/eu/hansolo/medusa/skins/TileTextKpiSkin.java
<ide> import javafx.scene.paint.Color;
<ide> import javafx.scene.shape.Rectangle;
<ide> import javafx.scene.text.Text;
<del>import javafx.scene.text.TextAlignment;
<del>import javafx.scene.text.TextFlow;
<ide>
<ide> import java.util.List;
<ide> import java.util.Locale;
<ide> private Text titleText;
<ide> private Text valueText;
<ide> private Text unitText;
<del> private TextFlow textContainer;
<ide> private Text percentageText;
<ide> private Text percentageUnitText;
<ide> private Rectangle maxValueRect;
<ide> private Text maxValueText;
<ide> private Pane pane;
<add> private double minValue;
<ide> private double range;
<ide> private double stepSize;
<ide> private String formatString;
<ide> public TileTextKpiSkin(Gauge gauge) {
<ide> super(gauge);
<ide> if (gauge.isAutoScale()) gauge.calcAutoScale();
<add> minValue = gauge.getMinValue();
<ide> range = gauge.getRange();
<ide> stepSize = PREFERRED_WIDTH / range;
<ide> formatString = new StringBuilder("%.").append(Integer.toString(gauge.getDecimals())).append("f").toString();
<ide> unitText.setFill(getSkinnable().getUnitColor());
<ide> Helper.enableNode(unitText, !getSkinnable().getUnit().isEmpty());
<ide>
<del> textContainer = new TextFlow(valueText, unitText);
<del> textContainer.setTextAlignment(TextAlignment.RIGHT);
<del> textContainer.setPrefWidth(PREFERRED_WIDTH * 0.9);
<del>
<ide> percentageText = new Text();
<ide> percentageText.setFill(getSkinnable().getBarColor());
<ide>
<ide> maxValueText = new Text();
<ide> maxValueText.setFill(getSkinnable().getBackgroundPaint());
<ide>
<del> pane = new Pane(barBackground, bar, titleText, textContainer, percentageText, percentageUnitText, maxValueRect, maxValueText);
<add> pane = new Pane(barBackground, bar, titleText, valueText, unitText, percentageText, percentageUnitText, maxValueRect, maxValueText);
<ide> pane.setBorder(new Border(new BorderStroke(getSkinnable().getBorderPaint(), BorderStrokeStyle.SOLID, CornerRadii.EMPTY, new BorderWidths(getSkinnable().getBorderWidth()))));
<ide> pane.setBackground(new Background(new BackgroundFill(getSkinnable().getBackgroundPaint(), CornerRadii.EMPTY, Insets.EMPTY)));
<ide>
<ide> } else if ("REDRAW".equals(EVENT_TYPE)) {
<ide> redraw();
<ide> } else if ("RECALC".equals(EVENT_TYPE)) {
<add> minValue = getSkinnable().getMinValue();
<ide> range = getSkinnable().getRange();
<ide> stepSize = size / range;
<ide> redraw();
<ide> }
<ide>
<ide> private void setBar(final double VALUE) {
<del> double targetValue = VALUE * stepSize;
<add> double targetValue = (VALUE - minValue) * stepSize;
<ide> bar.setWidth(targetValue);
<ide> valueText.setText(String.format(locale, formatString, VALUE));
<del> percentageText.setText(String.format(locale, formatString, (VALUE / range * 100)));
<add> percentageText.setText(String.format(locale, formatString, ((VALUE - minValue) / range * 100)));
<ide> resizeDynamicText();
<ide> if (sectionsVisible && !sections.isEmpty()) { setBarColor(VALUE); }
<ide> }
<ide> double fontSize = 0.24 * size;
<ide> valueText.setFont(Fonts.latoRegular(fontSize));
<ide> if (valueText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(valueText, maxWidth, fontSize); }
<add> valueText.relocate(size * 0.925 - valueText.getLayoutBounds().getWidth() - unitText.getLayoutBounds().getWidth(), size * 0.18);
<add>
<ide> percentageUnitText.relocate(percentageText.getLayoutBounds().getMaxX() + size * 0.075, size * 0.765);
<ide> }
<ide> private void resizeStaticText() {
<ide>
<ide> unitText.setFont(Fonts.latoRegular(fontSize));
<ide> if (unitText.getLayoutBounds().getWidth() > maxWidth) { Helper.adjustTextSize(unitText, maxWidth, fontSize); }
<add> unitText.relocate(size * 0.95 - unitText.getLayoutBounds().getWidth(), size * 0.3575);
<ide>
<ide> maxWidth = size * 0.45;
<ide> fontSize = size * 0.18;
<ide> bar.setWidth(getSkinnable().getValue() * stepSize);
<ide> bar.setHeight(size * 0.035);
<ide>
<del> textContainer.setPrefWidth(size * 0.9);
<del> textContainer.relocate(size * 0.05, size * 0.18);
<del>
<ide> resizeStaticText();
<ide> resizeDynamicText();
<ide> |
|
Java | apache-2.0 | ff1d92e4d7a472fb54480e8c0368af54d04d1bfe | 0 | jacksonic/vjlofvhjfgm,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,foam-framework/foam2 | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
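/**
 * An XFactory decorator that invokes its delegate factory only once and then
 * returns the cached instance on every subsequent call.
 */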
public class SingletonFactory
implements XFactory
{
XFactory delegate_ = null;
Object instance_;
public SingletonFactory(XFactory delegate) {
delegate_ = delegate;
}
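/**
 * Lazily builds the singleton from the delegate on first call; synchronized so
 * concurrent first calls cannot both reach the delegate.
 */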
public synchronized Object create(X x) {
if ( delegate_ != null ) {
instance_ = delegate_.create(x);
delegate_ = null;
}
return instance_;
}
}
| src/foam/core/SingletonFactory.java | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.core;
public class SingletonFactory
implements XFactory
{
XFactory delegate_ = null;
Object instance_;
public SingletonFactory(XFactory delegate) {
delegate_ = delegate;
}
public Object create(X x) {
if ( delegate_ != null ) {
instance_ = delegate_.create(x);
delegate_ = null;
}
return instance_;
}
}
| Make SingletonFactory thread safe.
| src/foam/core/SingletonFactory.java | Make SingletonFactory thread safe. | <ide><path>rc/foam/core/SingletonFactory.java
<ide> delegate_ = delegate;
<ide> }
<ide>
<del> public Object create(X x) {
<add> public synchronized Object create(X x) {
<ide> if ( delegate_ != null ) {
<ide> instance_ = delegate_.create(x);
<ide> delegate_ = null; |
|
Java | apache-2.0 | 6e882f2455be7f3d99efd074e198ee881504ad49 | 0 | cobbzilla/cobbzilla-wizard | package org.cobbzilla.wizard.model.search;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.Accessors;
import static org.cobbzilla.util.daemon.ZillaRuntime.empty;
import static org.cobbzilla.util.reflect.ReflectionUtil.copy;
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate;
@NoArgsConstructor @AllArgsConstructor @Accessors(chain=true)
public class SearchBound {
public SearchBound(SearchBound bound) { copy(this, bound); }
@Getter @Setter private String name;
@Getter @Setter private SearchBoundComparison comparison;
@Getter @Setter private SearchFieldType type;
public boolean hasType () { return type != null; }
@Getter @Setter private String[] params;
public boolean hasParams () { return !empty(params); }
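// Optional fully-qualified name of a CustomSearchBoundProcessor; getProcessor()
// instantiates it reflectively when a custom processor is configured.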
@Getter @Setter @JsonIgnore private String processorClass;
public boolean hasProcessor () { return !empty(processorClass); }
@JsonIgnore public <T extends CustomSearchBoundProcessor> T getProcessor() { return instantiate(processorClass); }
}
| wizard-common/src/main/java/org/cobbzilla/wizard/model/search/SearchBound.java | package org.cobbzilla.wizard.model.search;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.Accessors;
import static org.cobbzilla.util.daemon.ZillaRuntime.empty;
import static org.cobbzilla.util.reflect.ReflectionUtil.copy;
import static org.cobbzilla.util.reflect.ReflectionUtil.instantiate;
@NoArgsConstructor @AllArgsConstructor @Accessors(chain=true)
public class SearchBound {
@Getter @Setter private String name;
@Getter @Setter private SearchBoundComparison comparison;
@Getter @Setter private SearchFieldType type;
public SearchBound(SearchBound bound) { copy(this, bound); }
public boolean hasType () { return type != null; }
@Getter @Setter private String[] params;
public boolean hasParams () { return !empty(params); }
@Getter @Setter @JsonIgnore private String processorClass;
public boolean hasProcessor () { return !empty(processorClass); }
@JsonIgnore public <T extends CustomSearchBoundProcessor> T getProcessor() { return instantiate(processorClass); }
}
| move constructor to top
| wizard-common/src/main/java/org/cobbzilla/wizard/model/search/SearchBound.java | move constructor to top | <ide><path>izard-common/src/main/java/org/cobbzilla/wizard/model/search/SearchBound.java
<ide> @NoArgsConstructor @AllArgsConstructor @Accessors(chain=true)
<ide> public class SearchBound {
<ide>
<add> public SearchBound(SearchBound bound) { copy(this, bound); }
<add>
<ide> @Getter @Setter private String name;
<ide> @Getter @Setter private SearchBoundComparison comparison;
<ide>
<ide> @Getter @Setter private SearchFieldType type;
<del>
<del> public SearchBound(SearchBound bound) { copy(this, bound); }
<del>
<ide> public boolean hasType () { return type != null; }
<ide>
<ide> @Getter @Setter private String[] params; |
|
Java | bsd-3-clause | bf8403d50c551dc1601ff455462a1fadbbe10223 | 0 | tangentforks/piccolo2d.java | /*
* Copyright (c) 2008-2009, Piccolo2D project, http://piccolo2d.org
* Copyright (c) 1998-2008, University of Maryland
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* None of the name of the University of Maryland, the name of the Piccolo2D project, or the names of its
* contributors may be used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.umd.cs.piccolox.pswing;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ContainerAdapter;
import java.awt.event.ContainerEvent;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.RepaintManager;
import javax.swing.border.Border;
import edu.umd.cs.piccolo.PCamera;
import edu.umd.cs.piccolo.PLayer;
import edu.umd.cs.piccolo.PNode;
import edu.umd.cs.piccolo.util.PBounds;
import edu.umd.cs.piccolo.util.PPaintContext;
/*
This message was sent to Sun on August 27, 1999
-----------------------------------------------
We are currently developing Piccolo, a "scenegraph" for use in 2D graphics.
One of our ultimate goals is to support Swing lightweight components
within Piccolo, whose graphical space supports arbitrary affine transforms.
The challenge in this pursuit is getting the components to respond and
render properly though not actually displayed in a standard Java component
hierarchy.
The first issues involved making the Swing components focusable and
showing. This was accomplished by adding the Swing components to a 0x0
JComponent which was in turn added to our main Piccolo application component.
To our good fortune, a Java component is showing merely if it and its
ancestors are showing and not based on whether it is ACTUALLY visible.
Likewise, focus in a JComponent depends merely on the component's
containing window having focus.
The second issue involved capturing the repaint calls on a Swing
component. Normally, for a repaint and the consequent call to
paintImmediately, a Swing component obtains the Graphics object necessary
to render itself through the Java component hierarchy. However, for Piccolo
we would like the component to render using a Graphics object that Piccolo
may have arbitrarily transformed in some way. By capturing in the
RepaintManager the repaint calls made on our special Swing components, we
are able to redirect the repaint requests through the Piccolo architecture to
put the Graphics in its proper context. Unfortunately, this means that
if the Swing component contains other Swing components, then any repaint
requests made by one of these nested components must go through
the Piccolo architecture then through the top level Swing component
down to the nested Swing component. This normally doesn't cause a
problem. However, if calling paint on one of these nested
children causes a call to repaint then an infinite loop ensues. This does
in fact happen in the Swing components that use cell renderers. Before
the cell renderer is painted, it is invalidated and consequently
repainted. We solved this problem by putting a lock on repaint calls for
a component while that component is painting. (A similar problem faced
the Swing team over this same issue. They solved it by inserting a
CellRendererPane to capture the renderer's invalidate calls.)
Another issue arose over the forwarding of mouse events to the Swing
components. Since our Swing components are not actually displayed on
screen in the standard manner, we must manually dispatch any MouseEvents
we want the component to receive. Hence, we needed to find the deepest
visible component at a particular location that accepts MouseEvents.
Finding the deepest visible component at a point was achieved with the
"findComponentAt" method in java.awt.Container. With the
"getListeners(Class listenerType)" method added in JDK1.3 Beta we are able
to determine if the component has any Mouse Listeners. However, we haven't
yet found a way to determine if MouseEvents have been specifically enabled
for a component. The package private method "eventEnabled" in
java.awt.Component does exactly what we want but is, of course,
inaccessible. In order to dispatch events correctly we would need a
public accessor to the method "boolean eventEnabled(AWTEvent)" in
java.awt.Component.
Still another issue involves the management of cursors when the mouse is
over a Swing component in our application. To the Java mechanisms, the
mouse never appears to enter the bounds of the Swing components since they
are contained by a 0x0 JComponent. Hence, we must manually change the
cursor when the mouse enters one of the Swing components in our
application. This generally works but becomes a problem if the Swing
component's cursor changes while we are over that Swing component (for
instance, if you resize a Table Column). In order to manage cursors
properly, we would need setCursor to fire property change events.
With the above fixes, most Swing components work. The only Swing
components that are definitely broken are ToolTips and those that rely on
JPopupMenu. In order to implement ToolTips properly, we would need to have
a method in ToolTipManager that allows us to set the current manager, as
is possible with RepaintManager. In order to implement JPopupMenu, we
will likely need to reimplement JPopupMenu to function in Piccolo with
a transformed Graphics and to insert itself in the proper place in the
Piccolo scenegraph.
*/
/**
* <b>PSwing</b> is used to add Swing Components to a Piccolo canvas.
* <p>
* Example: adding a swing JButton to a PCanvas:
*
* <pre>
* PSwingCanvas canvas = new PSwingCanvas();
* JButton button = new JButton("Button");
* swing = new PSwing(canvas, button);
* canvas.getLayer().addChild(swing);
*
* <pre>
* </p>
* <p>
* NOTE: PSwing has the current limitation that it does not listen for Container
* events. This is only an issue if you create a PSwing and later add Swing
* components to the PSwing's component hierarchy that do not have double
* buffering turned off or have a smaller font size than the minimum font size
* of the original PSwing's component hierarchy.
* </p>
* <p>
* For instance, the following bit of code will give unexpected results:
*
* <pre>
* JPanel panel = new JPanel();
* PSwing swing = new PSwing(panel);
* JPanel newChild = new JPanel();
* newChild.setDoubleBuffered(true);
* panel.add(newChild);
* </pre>
*
* </p>
* <p>
* NOTE: PSwing cannot be correctly interacted with through multiple cameras.
* There is no support for it yet.
* </p>
* <p>
* NOTE: PSwing is java.io.Serializable.
* </p>
* <p>
* <b>Warning:</b> Serialized objects of this class will not be compatible with
* future Piccolo releases. The current serialization support is appropriate for
* short term storage or RMI between applications running the same version of
* Piccolo. A future release of Piccolo will provide support for long term
* persistence.
* </p>
*
* @author Sam R. Reid
* @author Benjamin B. Bederson
* @author Lance E. Good
*
* 3-23-2007 edited to automatically detect PCamera/PSwingCanvas to
* allow single-arg constructor usage
*/
public class PSwing extends PNode implements Serializable, PropertyChangeListener {
/**
* Used as a hashtable key for this object in the Swing component's client
* properties.
*/
public static final String PSWING_PROPERTY = "PSwing";
private static PBounds TEMP_REPAINT_BOUNDS2 = new PBounds();
/**
* The cutoff at which the Swing component is rendered greek
*/
private double renderCutoff = 0.3;
private JComponent component = null;
private double minFontSize = Double.MAX_VALUE;
private Stroke defaultStroke = new BasicStroke();
private Font defaultFont = new Font("Serif", Font.PLAIN, 12);
private PSwingCanvas canvas;
// //////////////////////////////////////////////////////////
// /////Following fields are for automatic canvas/camera detection
// //////////////////////////////////////////////////////////
/*
* Keep track of which nodes we've attached listeners to since no built in
* support in PNode
*/
private ArrayList listeningTo = new ArrayList();
/* The parent listener for camera/canvas changes */
private PropertyChangeListener parentListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
PNode parent = (PNode) evt.getNewValue();
clearListeners((PNode) evt.getOldValue());
if (parent != null) {
listenForCanvas(parent);
}
else {
updateCanvas(null);
}
}
};
/**
* Constructs a new visual component wrapper for the Swing component.
*
* @param component The swing component to be wrapped
*/
public PSwing(JComponent component) {
this.component = component;
component.putClientProperty(PSWING_PROPERTY, this);
init(component);
component.revalidate();
component.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
reshape();
}
});
component.addComponentListener(new ComponentAdapter() {
public void componentHidden(ComponentEvent e) {
setVisible(false);
}
public void componentShown(ComponentEvent e) {
setVisible(true);
}
});
reshape();
listenForCanvas(this);
}
/**
* Deprecated constructor for application code still depending on this
* signature.
*
* @param pSwingCanvas
* @param component
* @deprecated
*/
public PSwing(PSwingCanvas pSwingCanvas, JComponent component) {
this(component);
}
/**
* Ensures the bounds of the underlying component are accurate, and sets the
* bounds of this PNode.
*/
void reshape() {
Border border = component.getBorder();
int width = (int) Math.max(component.getMinimumSize().width, component.getPreferredSize().width);
int height = (int) component.getPreferredSize().height;
if (border != null) {
Insets borderInsets = border.getBorderInsets(component);
width += borderInsets.left + borderInsets.right;
}
component.setBounds(0, 0, width, height);
setBounds(0, 0, width, height);
}
/**
* Determines if the Swing component should be rendered normally or as a
* filled rectangle.
* <p/>
* The transform, clip, and composite will be set appropriately when this
* object is rendered. It is up to this object to restore the transform,
* clip, and composite of the Graphics2D if this node changes any of them.
* However, the color, font, and stroke are unspecified by Piccolo. This
* object should set those things if they are used, but they do not need to
* be restored.
*
* @param renderContext Contains information about current render.
*/
public void paint(PPaintContext renderContext) {
Graphics2D g2 = renderContext.getGraphics();
if (defaultStroke == null) {
defaultStroke = new BasicStroke();
}
g2.setStroke(defaultStroke);
if (defaultFont == null) {
defaultFont = new Font("Serif", Font.PLAIN, 12);
}
g2.setFont(defaultFont);
if (component.getParent() == null) {
// pSwingCanvas.getSwingWrapper().add( component );
component.revalidate();
}
if (component instanceof JLabel) {
JLabel label = (JLabel)component;
enforceNoEllipsis(label.getText(), label.getIcon(), label.getIconTextGap(), g2);
}
else if (component instanceof JButton) {
JButton button = (JButton)component;
enforceNoEllipsis(button.getText(), button.getIcon(), button.getIconTextGap(), g2);
}
if (shouldRenderGreek(renderContext)) {
paintAsGreek(g2);
}
else {
paint(g2);
}
}
private void enforceNoEllipsis(String text, Icon icon, int iconGap, Graphics2D g2) {
Rectangle2D textBounds = component.getFontMetrics(component.getFont()).getStringBounds(text, g2);
double minAcceptableWidth = textBounds.getWidth();
double minAcceptableHeight = textBounds.getHeight();
if (icon != null) {
minAcceptableWidth += icon.getIconWidth();
minAcceptableWidth += iconGap;
minAcceptableHeight = Math.max(icon.getIconHeight(), minAcceptableHeight);
}
if (component.getMinimumSize().getWidth() < minAcceptableWidth ) {
Dimension newMinimumSize = new Dimension((int)Math.ceil(minAcceptableWidth), (int)Math.ceil(minAcceptableHeight));
component.setMinimumSize(newMinimumSize);
reshape();
}
}
protected boolean shouldRenderGreek(PPaintContext renderContext) {
return (renderContext.getScale() < renderCutoff
// && pSwingCanvas.getInteracting()
)
|| minFontSize * renderContext.getScale() < 0.5;
}
/**
* Paints the Swing component as greek.
*
* @param g2 The graphics used to render the filled rectangle
*/
public void paintAsGreek(Graphics2D g2) {
Color background = component.getBackground();
Color foreground = component.getForeground();
Rectangle2D rect = getBounds();
if (background != null) {
g2.setColor(background);
}
g2.fill(rect);
if (foreground != null) {
g2.setColor(foreground);
}
g2.draw(rect);
}
/**
* Remove from the SwingWrapper; throws an exception if no canvas is
* associated with this PSwing.
*/
public void removeFromSwingWrapper() {
if (canvas != null && Arrays.asList(this.canvas.getSwingWrapper().getComponents()).contains(component)) {
this.canvas.getSwingWrapper().remove(component);
}
}
/**
* Renders to a buffered image, then draws that image to the drawing surface
* associated with g2 (usually the screen).
*
* @param g2 graphics context for rendering the JComponent
*/
public void paint(Graphics2D g2) {
if (component.getBounds().isEmpty()) {
// The component has not been initialized yet.
return;
}
PSwingRepaintManager manager = (PSwingRepaintManager) RepaintManager.currentManager(component);
manager.lockRepaint(component);
RenderingHints oldHints = g2.getRenderingHints();
g2.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
component.paint(g2);
g2.setRenderingHints(oldHints);
manager.unlockRepaint(component);
}
public void setVisible(boolean visible) {
super.setVisible(visible);
component.setVisible(visible);
}
/**
* Repaints the specified portion of this visual component Note that the
* input parameter may be modified as a result of this call.
*
* @param repaintBounds
*/
public void repaint(PBounds repaintBounds) {
Shape sh = getTransform().createTransformedShape(repaintBounds);
TEMP_REPAINT_BOUNDS2.setRect(sh.getBounds2D());
repaintFrom(TEMP_REPAINT_BOUNDS2, this);
}
/**
* Sets the Swing component's bounds to its preferred bounds unless it
* already is set to its preferred size. Also updates the visual components
* copy of these bounds
*/
public void computeBounds() {
reshape();
}
/**
* Returns the Swing component that this visual component wraps
*
* @return The Swing component that this visual component wraps
*/
public JComponent getComponent() {
return component;
}
/**
* We need to turn off double buffering of Swing components within Piccolo
* since all components contained within a native container use the same
* buffer for double buffering. With normal Swing widgets this is fine, but
* for Swing components within Piccolo this causes problems. This function
* recurses the component tree rooted at c, and turns off any double
* buffering in use. It also updates the minimum font size based on the font
* size of c and adds a property change listener to listen for changes to
* the font.
*
* @param c The Component to be recursively unDoubleBuffered
*/
void init(Component c) {
if (c.getFont() != null) {
minFontSize = Math.min(minFontSize, c.getFont().getSize());
}
if (c instanceof Container) {
Component[] children = ((Container) c).getComponents();
if (children != null) {
for (int j = 0; j < children.length; j++) {
init(children[j]);
}
}
((Container) c).addContainerListener(new ContainerAdapter() {
/** {@inheritDoc} */
public void componentAdded(final ContainerEvent event) {
init(event.getChild());
}
});
}
if (c instanceof JComponent) {
((JComponent) c).setDoubleBuffered(false);
c.addPropertyChangeListener("font", this);
c.addComponentListener(new ComponentAdapter() {
public void componentResized(ComponentEvent e) {
computeBounds();
}
public void componentShown(ComponentEvent e) {
computeBounds();
}
});
}
}
/**
* Listens for changes in font on components rooted at this PSwing
*/
public void propertyChange(PropertyChangeEvent evt) {
if (component.isAncestorOf((Component) evt.getSource()) && ((Component) evt.getSource()).getFont() != null) {
minFontSize = Math.min(minFontSize, ((Component) evt.getSource()).getFont().getSize());
}
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
init(component);
}
// //////////////////////////////////////////////////////////
// /////Start methods for automatic canvas detection
// //////////////////////////////////////////////////////////
/**
* Attaches a listener to the specified node and all its parents to listen
* for a change in the PSwingCanvas. Only PROPERTY_PARENT listeners are
* added so this code wouldn't handle if a PLayer were viewed by a different
* PCamera since that constitutes a child change.
*
* @param node The child node at which to begin a parent-based traversal for
* adding listeners.
*/
private void listenForCanvas(PNode node) {
// need to get the full tree for this node
PNode p = node;
while (p != null) {
listenToNode(p);
PNode parent = p;
// System.out.println( "parent = " + parent.getClass() );
if (parent instanceof PCamera) {
PCamera cam = (PCamera) parent;
if (cam.getComponent() instanceof PSwingCanvas) {
updateCanvas((PSwingCanvas) cam.getComponent());
}
}
else if (parent instanceof PLayer) {
PLayer player = (PLayer) parent;
// System.out.println( "Found player: with " +
// player.getCameraCount() + " cameras" );
for (int i = 0; i < player.getCameraCount(); i++) {
PCamera cam = player.getCamera(i);
if (cam.getComponent() instanceof PSwingCanvas) {
updateCanvas((PSwingCanvas) cam.getComponent());
break;
}
}
}
p = p.getParent();
}
}
/**
* Attach a listener to the specified node, if one has not already been
* attached.
*
* @param node the node to listen to for parent/pcamera/pcanvas changes
*/
private void listenToNode(PNode node) {
// System.out.println( "listeningTo.size() = " + listeningTo.size() );
if (!listeningTo(node)) {
listeningTo.add(node);
node.addPropertyChangeListener(PNode.PROPERTY_PARENT, parentListener);
}
}
/**
* Determine whether this PSwing is already listening to the specified node
* for camera/canvas changes.
*
* @param node the node to check
* @return true if this PSwing is already listening to the specified node
* for camera/canvas changes
*/
private boolean listeningTo(PNode node) {
for (int i = 0; i < listeningTo.size(); i++) {
PNode pNode = (PNode) listeningTo.get(i);
if (pNode == node) {
return true;
}
}
return false;
}
/**
* Clear out all the listeners registered to make sure there are no stray
* references
*
* @param fromParent Parent to start with for clearing listeners
*/
private void clearListeners(PNode fromParent) {
if (fromParent == null) {
return;
}
if (listeningTo(fromParent)) {
fromParent.removePropertyChangeListener(PNode.PROPERTY_PARENT, parentListener);
listeningTo.remove(fromParent);
clearListeners(fromParent.getParent());
}
}
/**
* Removes this PSwing from previous PSwingCanvas (if any), and ensure that
* this PSwing is attached to the new PSwingCanvas.
*
* @param newCanvas the new PSwingCanvas (may be null)
*/
private void updateCanvas(PSwingCanvas newCanvas) {
if (newCanvas != canvas) {
if (canvas != null) {
canvas.removePSwing(this);
}
canvas = newCanvas;
if (newCanvas != null) {
canvas.addPSwing(this);
reshape();
repaint();
}
}
}
// //////////////////////////////////////////////////////////
// /////End methods for automatic canvas detection
// //////////////////////////////////////////////////////////
}
| extras/src/main/java/edu/umd/cs/piccolox/pswing/PSwing.java | /*
* Copyright (c) 2008-2009, Piccolo2D project, http://piccolo2d.org
* Copyright (c) 1998-2008, University of Maryland
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* None of the name of the University of Maryland, the name of the Piccolo2D project, or the names of its
* contributors may be used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.umd.cs.piccolox.pswing;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.ContainerAdapter;
import java.awt.event.ContainerEvent;
import java.awt.geom.Rectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.RepaintManager;
import javax.swing.border.Border;
import edu.umd.cs.piccolo.PCamera;
import edu.umd.cs.piccolo.PLayer;
import edu.umd.cs.piccolo.PNode;
import edu.umd.cs.piccolo.util.PBounds;
import edu.umd.cs.piccolo.util.PPaintContext;
/*
This message was sent to Sun on August 27, 1999
-----------------------------------------------
We are currently developing Piccolo, a "scenegraph" for use in 2D graphics.
One of our ultimate goals is to support Swing lightweight components
within Piccolo, whose graphical space supports arbitrary affine transforms.
The challenge in this pursuit is getting the components to respond and
render properly though not actually displayed in a standard Java component
hierarchy.
The first issues involved making the Swing components focusable and
showing. This was accomplished by adding the Swing components to a 0x0
JComponent which was in turn added to our main Piccolo application component.
To our good fortune, a Java component is showing merely if it and its
ancestors are showing and not based on whether it is ACTUALLY visible.
Likewise, focus in a JComponent depends merely on the component's
containing window having focus.
The second issue involved capturing the repaint calls on a Swing
component. Normally, for a repaint and the consequent call to
paintImmediately, a Swing component obtains the Graphics object necessary
to render itself through the Java component hierarchy. However, for Piccolo
we would like the component to render using a Graphics object that Piccolo
may have arbitrarily transformed in some way. By capturing in the
RepaintManager the repaint calls made on our special Swing components, we
are able to redirect the repaint requests through the Piccolo architecture to
put the Graphics in its proper context. Unfortunately, this means that
if the Swing component contains other Swing components, then any repaint
requests made by one of these nested components must go through
the Piccolo architecture then through the top level Swing component
down to the nested Swing component. This normally doesn't cause a
problem. However, if calling paint on one of these nested
children causes a call to repaint then an infinite loop ensues. This does
in fact happen in the Swing components that use cell renderers. Before
the cell renderer is painted, it is invalidated and consequently
repainted. We solved this problem by putting a lock on repaint calls for
a component while that component is painting. (A similar problem faced
the Swing team over this same issue. They solved it by inserting a
CellRendererPane to capture the renderer's invalidate calls.)
Another issue arose over the forwarding of mouse events to the Swing
components. Since our Swing components are not actually displayed on
screen in the standard manner, we must manually dispatch any MouseEvents
we want the component to receive. Hence, we needed to find the deepest
visible component at a particular location that accepts MouseEvents.
Finding the deepest visible component at a point was achieved with the
"findComponentAt" method in java.awt.Container. With the
"getListeners(Class listenerType)" method added in JDK1.3 Beta we are able
to determine if the component has any Mouse Listeners. However, we haven't
yet found a way to determine if MouseEvents have been specifically enabled
for a component. The package private method "eventEnabled" in
java.awt.Component does exactly what we want but is, of course,
inaccessible. In order to dispatch events correctly we would need a
public accessor to the method "boolean eventEnabled(AWTEvent)" in
java.awt.Component.
Still another issue involves the management of cursors when the mouse is
over a Swing component in our application. To the Java mechanisms, the
mouse never appears to enter the bounds of the Swing components since they
are contained by a 0x0 JComponent. Hence, we must manually change the
cursor when the mouse enters one of the Swing components in our
application. This generally works but becomes a problem if the Swing
component's cursor changes while we are over that Swing component (for
instance, if you resize a Table Column). In order to manage cursors
properly, we would need setCursor to fire property change events.
With the above fixes, most Swing components work. The only Swing
components that are definitely broken are ToolTips and those that rely on
JPopupMenu. In order to implement ToolTips properly, we would need to have
a method in ToolTipManager that allows us to set the current manager, as
is possible with RepaintManager. In order to implement JPopupMenu, we
will likely need to reimplement JPopupMenu to function in Piccolo with
a transformed Graphics and to insert itself in the proper place in the
Piccolo scenegraph.
*/
/**
* <b>PSwing</b> is used to add Swing Components to a Piccolo canvas.
* <p>
* Example: adding a swing JButton to a PCanvas:
*
* <pre>
* PSwingCanvas canvas = new PSwingCanvas();
* JButton button = new JButton("Button");
* swing = new PSwing(canvas, button);
* canvas.getLayer().addChild(swing);
*
* <pre>
* </p>
* <p>
* NOTE: PSwing has the current limitation that it does not listen for Container
* events. This is only an issue if you create a PSwing and later add Swing
* components to the PSwing's component hierarchy that do not have double
* buffering turned off or have a smaller font size than the minimum font size
* of the original PSwing's component hierarchy.
* </p>
* <p>
* For instance, the following bit of code will give unexpected results:
*
* <pre>
* JPanel panel = new JPanel();
* PSwing swing = new PSwing(panel);
* JPanel newChild = new JPanel();
* newChild.setDoubleBuffered(true);
* panel.add(newChild);
* </pre>
*
* </p>
* <p>
* NOTE: PSwing cannot be correctly interacted with through multiple cameras.
* There is no support for it yet.
* </p>
* <p>
* NOTE: PSwing is java.io.Serializable.
* </p>
* <p>
* <b>Warning:</b> Serialized objects of this class will not be compatible with
* future Piccolo releases. The current serialization support is appropriate for
* short term storage or RMI between applications running the same version of
* Piccolo. A future release of Piccolo will provide support for long term
* persistence.
* </p>
*
* @author Sam R. Reid
* @author Benjamin B. Bederson
* @author Lance E. Good
*
* 3-23-2007 edited to automatically detect PCamera/PSwingCanvas to
* allow single-arg constructor usage
*/
public class PSwing extends PNode implements Serializable, PropertyChangeListener {
/**
* Used as a hashtable key for this object in the Swing component's client
* properties.
*/
public static final String PSWING_PROPERTY = "PSwing";
private static PBounds TEMP_REPAINT_BOUNDS2 = new PBounds();
/**
* The cutoff at which the Swing component is rendered greek
*/
private double renderCutoff = 0.3;
private JComponent component = null;
private double minFontSize = Double.MAX_VALUE;
private Stroke defaultStroke = new BasicStroke();
private Font defaultFont = new Font("Serif", Font.PLAIN, 12);
private PSwingCanvas canvas;
// //////////////////////////////////////////////////////////
// /////Following fields are for automatic canvas/camera detection
// //////////////////////////////////////////////////////////
/*
* Keep track of which nodes we've attached listeners to since no built in
* support in PNode
*/
private ArrayList listeningTo = new ArrayList();
/* The parent listener for camera/canvas changes */
private PropertyChangeListener parentListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
PNode parent = (PNode) evt.getNewValue();
clearListeners((PNode) evt.getOldValue());
if (parent != null) {
listenForCanvas(parent);
}
else {
updateCanvas(null);
}
}
};
/**
* Constructs a new visual component wrapper for the Swing component.
*
* @param component The swing component to be wrapped
*/
public PSwing(JComponent component) {
this.component = component;
component.putClientProperty(PSWING_PROPERTY, this);
init(component);
component.revalidate();
component.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
reshape();
}
});
component.addComponentListener(new ComponentAdapter() {
public void componentHidden(ComponentEvent e) {
setVisible(false);
}
public void componentShown(ComponentEvent e) {
setVisible(true);
}
});
reshape();
listenForCanvas(this);
}
/**
* Deprecated constructor for application code still depending on this
* signature.
*
* @param pSwingCanvas
* @param component
* @deprecated
*/
public PSwing(PSwingCanvas pSwingCanvas, JComponent component) {
this(component);
}
/**
* Ensures the bounds of the underlying component are accurate, and sets the
* bounds of this PNode.
*/
void reshape() {
Border border = component.getBorder();
int width = (int) Math.max(component.getMinimumSize().width, component.getPreferredSize().width);
int height = (int) Math.max(component.getMinimumSize().height, component.getPreferredSize().height);
if (border != null) {
Insets borderInsets = border.getBorderInsets(component);
width += borderInsets.left + borderInsets.right;
height += borderInsets.top + borderInsets.bottom;
}
component.setBounds(0, 0, width, height);
setBounds(0, 0, width, height);
}
/**
* Determines if the Swing component should be rendered normally or as a
* filled rectangle.
* <p/>
* The transform, clip, and composite will be set appropriately when this
* object is rendered. It is up to this object to restore the transform,
* clip, and composite of the Graphics2D if this node changes any of them.
* However, the color, font, and stroke are unspecified by Piccolo. This
* object should set those things if they are used, but they do not need to
* be restored.
*
* @param renderContext Contains information about current render.
*/
public void paint(PPaintContext renderContext) {
Graphics2D g2 = renderContext.getGraphics();
if (defaultStroke == null) {
defaultStroke = new BasicStroke();
}
g2.setStroke(defaultStroke);
if (defaultFont == null) {
defaultFont = new Font("Serif", Font.PLAIN, 12);
}
g2.setFont(defaultFont);
if (component.getParent() == null) {
// pSwingCanvas.getSwingWrapper().add( component );
component.revalidate();
}
if (component instanceof JLabel) {
JLabel label = (JLabel)component;
enforceNoEllipsis(label.getText(), label.getIcon(), label.getIconTextGap(), g2);
}
else if (component instanceof JButton) {
JButton button = (JButton)component;
enforceNoEllipsis(button.getText(), button.getIcon(), button.getIconTextGap(), g2);
}
if (shouldRenderGreek(renderContext)) {
paintAsGreek(g2);
}
else {
paint(g2);
}
}
private void enforceNoEllipsis(String text, Icon icon, int iconGap, Graphics2D g2) {
Rectangle2D textBounds = component.getFontMetrics(component.getFont()).getStringBounds(text, g2);
double minAcceptableWidth = textBounds.getWidth();
double minAcceptableHeight = textBounds.getHeight();
if (icon != null) {
minAcceptableWidth += icon.getIconWidth();
minAcceptableWidth += iconGap;
minAcceptableHeight = Math.max(icon.getIconHeight(), minAcceptableHeight);
}
if (component.getMinimumSize().getWidth() < minAcceptableWidth ) {
Dimension newMinimumSize = new Dimension((int)Math.ceil(minAcceptableWidth), (int)Math.ceil(minAcceptableHeight));
component.setMinimumSize(newMinimumSize);
reshape();
}
}
protected boolean shouldRenderGreek(PPaintContext renderContext) {
return (renderContext.getScale() < renderCutoff
// && pSwingCanvas.getInteracting()
)
|| minFontSize * renderContext.getScale() < 0.5;
}
/**
* Paints the Swing component as greek.
*
* @param g2 The graphics used to render the filled rectangle
*/
public void paintAsGreek(Graphics2D g2) {
Color background = component.getBackground();
Color foreground = component.getForeground();
Rectangle2D rect = getBounds();
if (background != null) {
g2.setColor(background);
}
g2.fill(rect);
if (foreground != null) {
g2.setColor(foreground);
}
g2.draw(rect);
}
/**
* Remove from the SwingWrapper; throws an exception if no canvas is
* associated with this PSwing.
*/
public void removeFromSwingWrapper() {
if (canvas != null && Arrays.asList(this.canvas.getSwingWrapper().getComponents()).contains(component)) {
this.canvas.getSwingWrapper().remove(component);
}
}
/**
* Renders to a buffered image, then draws that image to the drawing surface
* associated with g2 (usually the screen).
*
* @param g2 graphics context for rendering the JComponent
*/
public void paint(Graphics2D g2) {
if (component.getBounds().isEmpty()) {
// The component has not been initialized yet.
return;
}
PSwingRepaintManager manager = (PSwingRepaintManager) RepaintManager.currentManager(component);
manager.lockRepaint(component);
RenderingHints oldHints = g2.getRenderingHints();
g2.setRenderingHint(RenderingHints.KEY_FRACTIONALMETRICS, RenderingHints.VALUE_FRACTIONALMETRICS_ON);
g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
component.paint(g2);
g2.setRenderingHints(oldHints);
manager.unlockRepaint(component);
}
public void setVisible(boolean visible) {
super.setVisible(visible);
component.setVisible(visible);
}
/**
* Repaints the specified portion of this visual component Note that the
* input parameter may be modified as a result of this call.
*
* @param repaintBounds
*/
public void repaint(PBounds repaintBounds) {
Shape sh = getTransform().createTransformedShape(repaintBounds);
TEMP_REPAINT_BOUNDS2.setRect(sh.getBounds2D());
repaintFrom(TEMP_REPAINT_BOUNDS2, this);
}
/**
* Sets the Swing component's bounds to its preferred bounds unless it
* already is set to its preferred size. Also updates the visual components
* copy of these bounds
*/
public void computeBounds() {
reshape();
}
/**
* Returns the Swing component that this visual component wraps
*
* @return The Swing component that this visual component wraps
*/
public JComponent getComponent() {
return component;
}
/**
* We need to turn off double buffering of Swing components within Piccolo
* since all components contained within a native container use the same
* buffer for double buffering. With normal Swing widgets this is fine, but
* for Swing components within Piccolo this causes problems. This function
* recurses the component tree rooted at c, and turns off any double
* buffering in use. It also updates the minimum font size based on the font
* size of c and adds a property change listener to listen for changes to
* the font.
*
* @param c The Component to be recursively unDoubleBuffered
*/
void init(Component c) {
if (c.getFont() != null) {
minFontSize = Math.min(minFontSize, c.getFont().getSize());
}
if (c instanceof Container) {
Component[] children = ((Container) c).getComponents();
if (children != null) {
for (int j = 0; j < children.length; j++) {
init(children[j]);
}
}
((Container) c).addContainerListener(new ContainerAdapter() {
/** {@inheritDoc} */
public void componentAdded(final ContainerEvent event) {
init(event.getChild());
}
});
}
if (c instanceof JComponent) {
((JComponent) c).setDoubleBuffered(false);
c.addPropertyChangeListener("font", this);
c.addComponentListener(new ComponentAdapter() {
public void componentResized(ComponentEvent e) {
computeBounds();
}
public void componentShown(ComponentEvent e) {
computeBounds();
}
});
}
}
/**
* Listens for changes in font on components rooted at this PSwing
*/
public void propertyChange(PropertyChangeEvent evt) {
if (component.isAncestorOf((Component) evt.getSource()) && ((Component) evt.getSource()).getFont() != null) {
minFontSize = Math.min(minFontSize, ((Component) evt.getSource()).getFont().getSize());
}
}
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
in.defaultReadObject();
init(component);
}
// //////////////////////////////////////////////////////////
// /////Start methods for automatic canvas detection
// //////////////////////////////////////////////////////////
/**
* Attaches a listener to the specified node and all its parents to listen
* for a change in the PSwingCanvas. Only PROPERTY_PARENT listeners are
* added so this code wouldn't handle if a PLayer were viewed by a different
* PCamera since that constitutes a child change.
*
* @param node The child node at which to begin a parent-based traversal for
* adding listeners.
*/
private void listenForCanvas(PNode node) {
// need to get the full tree for this node
PNode p = node;
while (p != null) {
listenToNode(p);
PNode parent = p;
// System.out.println( "parent = " + parent.getClass() );
if (parent instanceof PCamera) {
PCamera cam = (PCamera) parent;
if (cam.getComponent() instanceof PSwingCanvas) {
updateCanvas((PSwingCanvas) cam.getComponent());
}
}
else if (parent instanceof PLayer) {
PLayer player = (PLayer) parent;
// System.out.println( "Found player: with " +
// player.getCameraCount() + " cameras" );
for (int i = 0; i < player.getCameraCount(); i++) {
PCamera cam = player.getCamera(i);
if (cam.getComponent() instanceof PSwingCanvas) {
updateCanvas((PSwingCanvas) cam.getComponent());
break;
}
}
}
p = p.getParent();
}
}
/**
* Attach a listener to the specified node, if one has not already been
* attached.
*
* @param node the node to listen to for parent/pcamera/pcanvas changes
*/
private void listenToNode(PNode node) {
// System.out.println( "listeningTo.size() = " + listeningTo.size() );
if (!listeningTo(node)) {
listeningTo.add(node);
node.addPropertyChangeListener(PNode.PROPERTY_PARENT, parentListener);
}
}
/**
* Determine whether this PSwing is already listening to the specified node
* for camera/canvas changes.
*
* @param node the node to check
* @return true if this PSwing is already listening to the specified node
* for camera/canvas changes
*/
private boolean listeningTo(PNode node) {
for (int i = 0; i < listeningTo.size(); i++) {
PNode pNode = (PNode) listeningTo.get(i);
if (pNode == node) {
return true;
}
}
return false;
}
/**
* Clear out all the listeners registered to make sure there are no stray
* references
*
* @param fromParent Parent to start with for clearing listeners
*/
private void clearListeners(PNode fromParent) {
if (fromParent == null) {
return;
}
if (listeningTo(fromParent)) {
fromParent.removePropertyChangeListener(PNode.PROPERTY_PARENT, parentListener);
listeningTo.remove(fromParent);
clearListeners(fromParent.getParent());
}
}
/**
* Removes this PSwing from previous PSwingCanvas (if any), and ensure that
* this PSwing is attached to the new PSwingCanvas.
*
* @param newCanvas the new PSwingCanvas (may be null)
*/
private void updateCanvas(PSwingCanvas newCanvas) {
if (newCanvas != canvas) {
if (canvas != null) {
canvas.removePSwing(this);
}
canvas = newCanvas;
if (newCanvas != null) {
canvas.addPSwing(this);
reshape();
repaint();
}
}
}
// //////////////////////////////////////////////////////////
// /////End methods for automatic canvas detection
// //////////////////////////////////////////////////////////
}
| Very strange slider behaviour in mac. Trying a fix.
git-svn-id: d976a3fa9fd96fa05fb374ae920b691bf85e82cb@552 aadc08cf-1350-0410-9b51-cf97fce99a1b
| extras/src/main/java/edu/umd/cs/piccolox/pswing/PSwing.java | Very strange slider behaviour in mac. Trying a fix. | <ide><path>xtras/src/main/java/edu/umd/cs/piccolox/pswing/PSwing.java
<ide> Border border = component.getBorder();
<ide>
<ide> int width = (int) Math.max(component.getMinimumSize().width, component.getPreferredSize().width);
<del> int height = (int) Math.max(component.getMinimumSize().height, component.getPreferredSize().height);
<add> int height = (int) component.getPreferredSize().height;
<ide>
<ide> if (border != null) {
<ide> Insets borderInsets = border.getBorderInsets(component);
<ide> width += borderInsets.left + borderInsets.right;
<del> height += borderInsets.top + borderInsets.bottom;
<ide> }
<ide>
<ide> component.setBounds(0, 0, width, height); |
|
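[Editorial note — not part of the dataset record above.] The PSwing patch ("Very strange slider behaviour in mac. Trying a fix.") changes reshape() so the wrapped component's height comes from its preferred size alone, while the width still honours both the minimum and preferred widths plus the left/right border insets. The standalone sketch below simply restates that sizing rule outside the PSwing class; the class and method names are invented here for illustration and are not part of Piccolo2D.

import java.awt.Dimension;
import java.awt.Insets;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.border.Border;

// Editorial sketch: the sizing rule used by the patched reshape().
public class ReshapeSizingSketch {

    static Dimension sizeFor(JComponent component) {
        Border border = component.getBorder();
        // Width respects both the minimum and preferred widths...
        int width = Math.max(component.getMinimumSize().width, component.getPreferredSize().width);
        // ...but height is taken from the preferred size only (the Mac workaround).
        int height = component.getPreferredSize().height;
        if (border != null) {
            Insets insets = border.getBorderInsets(component);
            width += insets.left + insets.right; // top/bottom insets intentionally ignored
        }
        return new Dimension(width, height);
    }

    public static void main(String[] args) {
        Dimension d = sizeFor(new JButton("Button"));
        System.out.println("width=" + d.width + " height=" + d.height);
    }
}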
Java | apache-2.0 | a4056d17e57e535855382779195d906f8f1ad34c | 0 | WestwoodCompSci/SkippableTV_15.4,WestwoodCompSci/SkippableTV_15.4 | package backend;
public class Episode {
int time;
int number;
int rating;
public Episode(int n, int t, int r)
{
time =t;
number = n;
rating =r;
}
public int getEpisodeTime()
{
return time;
}
public int getRating()
{
return rating;
}
public int getNumber()
{
return number;
}
}
| src/backend/Episode.java | package backend;
public class Episode {
int time;
int number;
int rating;
public Episode(int n, int t, int r)
{
time =t;
number = n;
rating =r;
}
public int getEpisodeTime()
{
return time;
}
}
| added get title/num methods correctly backend
| src/backend/Episode.java | added get title/num methods correctly backend | <ide><path>rc/backend/Episode.java
<ide> {
<ide> return time;
<ide> }
<add> public int getRating()
<add> {
<add> return rating;
<add> }
<add> public int getNumber()
<add> {
<add> return number;
<add> }
<ide> }
<ide> |
|
Java | agpl-3.0 | 64628345f5c0a869e55bb79bc6f9a1c1b0dfc289 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | e3e02e8e-2e5f-11e5-9284-b827eb9e62be | hello.java | e3da9eba-2e5f-11e5-9284-b827eb9e62be | e3e02e8e-2e5f-11e5-9284-b827eb9e62be | hello.java | e3e02e8e-2e5f-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>e3da9eba-2e5f-11e5-9284-b827eb9e62be
<add>e3e02e8e-2e5f-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | eea252905140dbda01478690dc56bdc40183297c | 0 | OrangeRhymeLabs/HelenusDB,OrangeRhymeLabs/HelenusDB,OrangeRhymeLabs/OrangeDB,OrangeRhymeLabs/OrangeDB | /*
Copyright 2015, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.orangerhymelabs.orangedb.cassandra;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.exceptions.InvalidTypeException;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.orangerhymelabs.orangedb.exception.DuplicateItemException;
import com.orangerhymelabs.orangedb.exception.InvalidIdentifierException;
import com.orangerhymelabs.orangedb.exception.ItemNotFoundException;
import com.orangerhymelabs.orangedb.exception.StorageException;
import com.orangerhymelabs.orangedb.persistence.Identifier;
/**
* @author tfredrich
* @since Jun 8, 2015
* @param <T> The type stored in this repository.
*/
public abstract class AbstractCassandraRepository<T>
{
private Session session;
private String keyspace;
private PreparedStatement createStmt;
private PreparedStatement updateStmt;
private PreparedStatement readAllStmt;
private PreparedStatement readStmt;
private PreparedStatement deleteStmt;
public AbstractCassandraRepository(Session session, String keyspace)
{
this.session = session;
this.keyspace = keyspace;
initializeStatements();
}
protected void initializeStatements()
{
createStmt = prepare(buildCreateStatement());
updateStmt = prepare(buildUpdateStatement());
readAllStmt = prepare(buildReadAllStatement());
readStmt = prepare(buildReadStatement());
deleteStmt = prepare(buildDeleteStatement());
}
public void createAsync(T entity, FutureCallback<T> callback)
{
ResultSetFuture future = null;
try
{
future = _create(entity);
}
catch(Exception e)
{
callback.onFailure(e);
return;
}
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new DuplicateItemException(entity.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T create(T entity)
{
try
{
ResultSet rs = _create(entity).get();
if (rs.wasApplied())
{
return entity;
}
throw new DuplicateItemException(entity.toString());
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
protected ResultSetFuture _create(T entity)
{
BoundStatement bs = new BoundStatement(createStmt);
bindCreate(bs, entity);
return session.executeAsync(bs);
}
public void updateAsync(T entity, FutureCallback<T> callback)
{
ResultSetFuture future = null;
try
{
future = _update(entity);
}
catch (Exception e)
{
callback.onFailure(e);
return;
}
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new ItemNotFoundException(entity.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T update(T entity)
{
try
{
ResultSet rs = _update(entity).get();
if (rs.wasApplied())
{
return entity;
}
throw new ItemNotFoundException(entity.toString());
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _update(T entity)
{
BoundStatement bs = new BoundStatement(updateStmt);
bindUpdate(bs, entity);
return session.executeAsync(bs);
}
public void deleteAsync(Identifier id, FutureCallback<T> callback)
{
ResultSetFuture future = _delete(id);
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new ItemNotFoundException(id.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public void delete(Identifier id)
{
try
{
_delete(id).get();
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _delete(Identifier id)
{
BoundStatement bs = new BoundStatement(deleteStmt);
bindIdentity(bs, id);
return session.executeAsync(bs);
}
public void readAsync(Identifier id, FutureCallback<T> callback)
{
ResultSetFuture future;
try
{
future = _read(id);
}
catch(Exception e)
{
callback.onFailure(e);
return;
}
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (result.isExhausted())
{
callback.onFailure(new ItemNotFoundException(id.toString()));
}
T entity = marshalRow(result.one());
callback.onSuccess(entity);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T read(Identifier id)
{
try
{
ResultSet rs = _read(id).get();
if (rs.isExhausted())
{
throw new ItemNotFoundException(id.toString());
}
T entity = marshalRow(rs.one());
return entity;
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _read(Identifier id)
{
BoundStatement bs = new BoundStatement(readStmt);
bindIdentity(bs, id);
return session.executeAsync(bs);
}
public void readAllAsync(FutureCallback<List<T>> callback, Object... parms)
{
ResultSetFuture future = _readAll(parms);
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
callback.onSuccess(marshalAll(result));
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public List<T> readAll(Object... parms)
{
try
{
ResultSet rs = _readAll(parms).get();
return marshalAll(rs);
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _readAll(Object... parms)
{
BoundStatement bs = new BoundStatement(readAllStmt);
if (parms != null)
{
bs.bind(parms);
}
return session.executeAsync(bs);
}
/**
* Read all given identifiers.
*
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* Note that the callback is not called with a single List of results. Instead it is called once for each
* Identifier provided in the call, whether successful or failed.
*
* @param callback a FutureCallback to notify for each ID in the ids array.
* @param ids the partition keys (identifiers) to select.
*/
public void readInAsync(FutureCallback<T> callback, Identifier... ids)
{
List<ListenableFuture<ResultSet>> futures = _readIn(ids);
for (ListenableFuture<ResultSet> future : futures)
{
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.isExhausted())
{
callback.onSuccess(marshalRow(result.one()));
}
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor()
);
}
}
/**
* Read all given identifiers, returning them as a List. No order is guaranteed in the resulting list.
*
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* @param ids the partition keys (identifiers) to select.
* @return a list of entities identified by the identifiers given in the 'ids' parameter.
*/
public List<T> readIn(Identifier... ids)
{
if (ids == null) return Collections.emptyList();
List<ListenableFuture<ResultSet>> futures = _readIn(ids);
List<T> results = new ArrayList<T>(futures.size());
try
{
for (ListenableFuture<ResultSet> future : futures)
{
ResultSet rs = future.get();
if (!rs.isExhausted())
{
results.add(marshalRow(rs.one()));
}
}
return results;
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
/**
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* @param ids the partition keys (identifiers) to select.
* @return a List of ListenableFuture instances for each underlying ResultSet--one for each ID.
*/
private List<ListenableFuture<ResultSet>> _readIn(Identifier... ids)
{
if (ids == null) return null;
List<ResultSetFuture> futures = new ArrayList<ResultSetFuture>(ids.length);
BoundStatement bs = new BoundStatement(readStmt);
for (Identifier id : ids)
{
bindIdentity(bs, id);
futures.add(session.executeAsync(bs));
}
return Futures.inCompletionOrder(futures);
}
public Session session()
{
return session;
}
protected String keyspace()
{
return keyspace;
}
protected void bindIdentity(BoundStatement bs, Identifier id)
{
try
{
bs.bind(id.components().toArray());
}
catch(InvalidTypeException e)
{
throw new InvalidIdentifierException(e);
}
}
protected List<T> marshalAll(ResultSet rs)
{
List<T> results = new ArrayList<T>();
Iterator<Row> i = rs.iterator();
while (i.hasNext())
{
results.add(marshalRow(i.next()));
}
return results;
}
protected abstract void bindCreate(BoundStatement bs, T entity);
protected abstract void bindUpdate(BoundStatement bs, T entity);
protected abstract T marshalRow(Row row);
protected abstract String buildCreateStatement();
protected abstract String buildUpdateStatement();
protected abstract String buildReadStatement();
protected abstract String buildReadAllStatement();
protected abstract String buildDeleteStatement();
protected PreparedStatement prepare(String statement)
{
if (statement == null || statement.trim().isEmpty())
{
return null;
}
return session().prepare(statement);
}
}
| cassandra/src/main/java/com/orangerhymelabs/orangedb/cassandra/AbstractCassandraRepository.java | /*
Copyright 2015, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.orangerhymelabs.orangedb.cassandra;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutionException;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.orangerhymelabs.orangedb.exception.DuplicateItemException;
import com.orangerhymelabs.orangedb.exception.ItemNotFoundException;
import com.orangerhymelabs.orangedb.exception.StorageException;
import com.orangerhymelabs.orangedb.persistence.Identifier;
/**
* @author tfredrich
* @since Jun 8, 2015
* @param <T> The type stored in this repository.
*/
public abstract class AbstractCassandraRepository<T>
{
private Session session;
private String keyspace;
private PreparedStatement createStmt;
private PreparedStatement updateStmt;
private PreparedStatement readAllStmt;
private PreparedStatement readStmt;
private PreparedStatement deleteStmt;
public AbstractCassandraRepository(Session session, String keyspace)
{
this.session = session;
this.keyspace = keyspace;
initializeStatements();
}
protected void initializeStatements()
{
createStmt = prepare(buildCreateStatement());
updateStmt = prepare(buildUpdateStatement());
readAllStmt = prepare(buildReadAllStatement());
readStmt = prepare(buildReadStatement());
deleteStmt = prepare(buildDeleteStatement());
}
public void createAsync(T entity, FutureCallback<T> callback)
{
ResultSetFuture future = null;
try
{
future = _create(entity);
}
catch(Exception e)
{
callback.onFailure(e);
return;
}
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new DuplicateItemException(entity.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T create(T entity)
{
try
{
ResultSet rs = _create(entity).get();
if (rs.wasApplied())
{
return entity;
}
throw new DuplicateItemException(entity.toString());
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
protected ResultSetFuture _create(T entity)
{
BoundStatement bs = new BoundStatement(createStmt);
bindCreate(bs, entity);
return session.executeAsync(bs);
}
public void updateAsync(T entity, FutureCallback<T> callback)
{
ResultSetFuture future = null;
try
{
future = _update(entity);
}
catch (Exception e)
{
callback.onFailure(e);
return;
}
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new ItemNotFoundException(entity.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T update(T entity)
{
try
{
ResultSet rs = _update(entity).get();
if (rs.wasApplied())
{
return entity;
}
throw new ItemNotFoundException(entity.toString());
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _update(T entity)
{
BoundStatement bs = new BoundStatement(updateStmt);
bindUpdate(bs, entity);
return session.executeAsync(bs);
}
public void deleteAsync(Identifier id, FutureCallback<T> callback)
{
ResultSetFuture future = _delete(id);
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.wasApplied())
{
callback.onFailure(new ItemNotFoundException(id.toString()));
}
callback.onSuccess(null);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public void delete(Identifier id)
{
try
{
_delete(id).get();
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _delete(Identifier id)
{
BoundStatement bs = new BoundStatement(deleteStmt);
bindIdentity(bs, id);
return session.executeAsync(bs);
}
public void readAsync(Identifier id, FutureCallback<T> callback)
{
ResultSetFuture future = _read(id);
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (result.isExhausted())
{
callback.onFailure(new ItemNotFoundException(id.toString()));
}
T entity = marshalRow(result.one());
callback.onSuccess(entity);
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public T read(Identifier id)
{
try
{
ResultSet rs = _read(id).get();
if (rs.isExhausted())
{
throw new ItemNotFoundException(id.toString());
}
T entity = marshalRow(rs.one());
return entity;
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _read(Identifier id)
{
BoundStatement bs = new BoundStatement(readStmt);
bindIdentity(bs, id);
return session.executeAsync(bs);
}
public void readAllAsync(FutureCallback<List<T>> callback, Object... parms)
{
ResultSetFuture future = _readAll(parms);
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
callback.onSuccess(marshalAll(result));
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor());
}
public List<T> readAll(Object... parms)
{
try
{
ResultSet rs = _readAll(parms).get();
return marshalAll(rs);
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
private ResultSetFuture _readAll(Object... parms)
{
BoundStatement bs = new BoundStatement(readAllStmt);
if (parms != null)
{
bs.bind(parms);
}
return session.executeAsync(bs);
}
/**
* Read all given identifiers.
*
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* Note that the callback is not called with a single List of results. Instead it is called once for each
* Identifier provided in the call, whether successful or failed.
*
* @param callback a FutureCallback to notify for each ID in the ids array.
* @param ids the partition keys (identifiers) to select.
*/
public void readInAsync(FutureCallback<T> callback, Identifier... ids)
{
List<ListenableFuture<ResultSet>> futures = _readIn(ids);
for (ListenableFuture<ResultSet> future : futures)
{
Futures.addCallback(future, new FutureCallback<ResultSet>()
{
@Override
public void onSuccess(ResultSet result)
{
if (!result.isExhausted())
{
callback.onSuccess(marshalRow(result.one()));
}
}
@Override
public void onFailure(Throwable t)
{
callback.onFailure(t);
}
}, MoreExecutors.sameThreadExecutor()
);
}
}
/**
* Read all given identifiers, returning them as a List. No order is guaranteed in the resulting list.
*
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* @param ids the partition keys (identifiers) to select.
* @return a list of entities identified by the identifiers given in the 'ids' parameter.
*/
public List<T> readIn(Identifier... ids)
{
if (ids == null) return Collections.emptyList();
List<ListenableFuture<ResultSet>> futures = _readIn(ids);
List<T> results = new ArrayList<T>(futures.size());
try
{
for (ListenableFuture<ResultSet> future : futures)
{
ResultSet rs = future.get();
if (!rs.isExhausted())
{
results.add(marshalRow(rs.one()));
}
}
return results;
}
catch (InterruptedException | ExecutionException e)
{
throw new StorageException(e);
}
}
/**
* Leverages the token-awareness of the driver to optimally query each node directly instead of invoking a
* coordinator node. Sends an individual query for each partition key, so reaches the appropriate replica
* directly and collates the results client-side.
*
* @param ids the partition keys (identifiers) to select.
* @return a List of ListenableFuture instances for each underlying ResultSet--one for each ID.
*/
private List<ListenableFuture<ResultSet>> _readIn(Identifier... ids)
{
if (ids == null) return null;
List<ResultSetFuture> futures = new ArrayList<ResultSetFuture>(ids.length);
BoundStatement bs = new BoundStatement(readStmt);
for (Identifier id : ids)
{
bindIdentity(bs, id);
futures.add(session.executeAsync(bs));
}
return Futures.inCompletionOrder(futures);
}
public Session session()
{
return session;
}
protected String keyspace()
{
return keyspace;
}
protected void bindIdentity(BoundStatement bs, Identifier id)
{
bs.bind(id.components().toArray());
}
protected List<T> marshalAll(ResultSet rs)
{
List<T> results = new ArrayList<T>();
Iterator<Row> i = rs.iterator();
while (i.hasNext())
{
results.add(marshalRow(i.next()));
}
return results;
}
protected abstract void bindCreate(BoundStatement bs, T entity);
protected abstract void bindUpdate(BoundStatement bs, T entity);
protected abstract T marshalRow(Row row);
protected abstract String buildCreateStatement();
protected abstract String buildUpdateStatement();
protected abstract String buildReadStatement();
protected abstract String buildReadAllStatement();
protected abstract String buildDeleteStatement();
protected PreparedStatement prepare(String statement)
{
if (statement == null || statement.trim().isEmpty())
{
return null;
}
return session().prepare(statement);
}
}
| Enhanced AbstractCassandraRepository.bindIdentity() and readAsync() to catch InvalidTypeException and handle it appropriately.
| cassandra/src/main/java/com/orangerhymelabs/orangedb/cassandra/AbstractCassandraRepository.java | Enhanced AbstractCassandraRepository.bindIdentity() and readAsync() to catch InvalidTypeException and handle it appropriately. | <ide><path>assandra/src/main/java/com/orangerhymelabs/orangedb/cassandra/AbstractCassandraRepository.java
<ide> import com.datastax.driver.core.ResultSetFuture;
<ide> import com.datastax.driver.core.Row;
<ide> import com.datastax.driver.core.Session;
<add>import com.datastax.driver.core.exceptions.InvalidTypeException;
<ide> import com.google.common.util.concurrent.FutureCallback;
<ide> import com.google.common.util.concurrent.Futures;
<ide> import com.google.common.util.concurrent.ListenableFuture;
<ide> import com.google.common.util.concurrent.MoreExecutors;
<ide> import com.orangerhymelabs.orangedb.exception.DuplicateItemException;
<add>import com.orangerhymelabs.orangedb.exception.InvalidIdentifierException;
<ide> import com.orangerhymelabs.orangedb.exception.ItemNotFoundException;
<ide> import com.orangerhymelabs.orangedb.exception.StorageException;
<ide> import com.orangerhymelabs.orangedb.persistence.Identifier;
<ide>
<ide> public void readAsync(Identifier id, FutureCallback<T> callback)
<ide> {
<del> ResultSetFuture future = _read(id);
<add> ResultSetFuture future;
<add>
<add> try
<add> {
<add> future = _read(id);
<add> }
<add> catch(Exception e)
<add> {
<add> callback.onFailure(e);
<add> return;
<add> }
<add>
<ide> Futures.addCallback(future, new FutureCallback<ResultSet>()
<ide> {
<ide> @Override
<ide>
<ide> protected void bindIdentity(BoundStatement bs, Identifier id)
<ide> {
<del> bs.bind(id.components().toArray());
<add> try
<add> {
<add> bs.bind(id.components().toArray());
<add> }
<add> catch(InvalidTypeException e)
<add> {
<add> throw new InvalidIdentifierException(e);
<add> }
<ide> }
<ide>
<ide> protected List<T> marshalAll(ResultSet rs) |
|
Java | agpl-3.0 | f7d5c4f3867e2e395a0528e888fa85bf52af5ea8 | 0 | akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow | /*
* Copyright (C) 2010-2013 Stichting Akvo (Akvo Foundation)
*
* This file is part of Akvo FLOW.
*
* Akvo FLOW is free software: you can redistribute it and modify it under the terms of
* the GNU Affero General Public License (AGPL) as published by the Free Software Foundation,
* either version 3 of the License or any later version.
*
* Akvo FLOW is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License included below for more details.
*
* The full license text can also be seen at <http://www.gnu.org/licenses/agpl.html>.
*/
package org.waterforpeople.mapping.app.web;
import java.io.BufferedInputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipInputStream;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.servlet.http.HttpServletRequest;
import net.sf.jsr107cache.Cache;
import net.sf.jsr107cache.CacheFactory;
import net.sf.jsr107cache.CacheManager;
import org.waterforpeople.mapping.analytics.dao.SurveyInstanceSummaryDao;
import org.waterforpeople.mapping.analytics.dao.SurveyQuestionSummaryDao;
import org.waterforpeople.mapping.analytics.domain.SurveyQuestionSummary;
import org.waterforpeople.mapping.app.web.dto.DataProcessorRequest;
import org.waterforpeople.mapping.dao.AccessPointDao;
import org.waterforpeople.mapping.dao.DeviceFilesDao;
import org.waterforpeople.mapping.dao.QuestionAnswerStoreDao;
import org.waterforpeople.mapping.dao.SurveyInstanceDAO;
import org.waterforpeople.mapping.dataexport.SurveyReplicationImporter;
import org.waterforpeople.mapping.domain.AccessPoint;
import org.waterforpeople.mapping.domain.GeoCoordinates;
import org.waterforpeople.mapping.domain.QuestionAnswerStore;
import org.waterforpeople.mapping.domain.SurveyInstance;
import com.gallatinsystems.common.Constants;
import com.gallatinsystems.device.domain.DeviceFiles;
import com.gallatinsystems.framework.rest.AbstractRestApiServlet;
import com.gallatinsystems.framework.rest.RestRequest;
import com.gallatinsystems.framework.rest.RestResponse;
import com.gallatinsystems.framework.servlet.PersistenceFilter;
import com.gallatinsystems.gis.location.GeoLocationService;
import com.gallatinsystems.gis.location.GeoLocationServiceGeonamesImpl;
import com.gallatinsystems.gis.location.GeoPlace;
import com.gallatinsystems.messaging.dao.MessageDao;
import com.gallatinsystems.messaging.domain.Message;
import com.gallatinsystems.operations.dao.ProcessingStatusDao;
import com.gallatinsystems.operations.domain.ProcessingStatus;
import com.gallatinsystems.survey.dao.QuestionDao;
import com.gallatinsystems.survey.dao.QuestionGroupDao;
import com.gallatinsystems.survey.dao.QuestionOptionDao;
import com.gallatinsystems.survey.dao.SurveyDAO;
import com.gallatinsystems.survey.dao.SurveyUtils;
import com.gallatinsystems.survey.domain.Question;
import com.gallatinsystems.survey.domain.QuestionGroup;
import com.gallatinsystems.survey.domain.QuestionOption;
import com.gallatinsystems.survey.domain.Survey;
import com.gallatinsystems.surveyal.dao.SurveyedLocaleDao;
import com.gallatinsystems.surveyal.domain.SurveyedLocale;
import com.google.appengine.api.backends.BackendServiceFactory;
import com.google.appengine.api.memcache.MemcacheService;
import com.google.appengine.api.memcache.stdimpl.GCacheFactory;
import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskOptions;
/**
* Restful servlet to do bulk data update operations
*
* @author Christopher Fagiani
*
*/
public class DataProcessorRestServlet extends AbstractRestApiServlet {
private static final Logger log = Logger
.getLogger("DataProcessorRestServlet");
private static final long serialVersionUID = -7902002525342262821L;
private static final String REBUILD_Q_SUM_STATUS_KEY = "rebuildQuestionSummary";
private static final Integer QAS_PAGE_SIZE = 300;
private static final String QAS_TO_REMOVE = "QAStoRemove";
@Override
protected RestRequest convertRequest() throws Exception {
HttpServletRequest req = getRequest();
RestRequest restRequest = new DataProcessorRequest();
restRequest.populateFromHttpRequest(req);
return restRequest;
}
@Override
protected RestResponse handleRequest(RestRequest req) throws Exception {
DataProcessorRequest dpReq = (DataProcessorRequest) req;
if (DataProcessorRequest.PROJECT_FLAG_UPDATE_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
updateAccessPointProjectFlag(dpReq.getCountry(), dpReq.getCursor());
} else if (DataProcessorRequest.REBUILD_QUESTION_SUMMARY_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
rebuildQuestionSummary(dpReq.getSurveyId());
} else if (DataProcessorRequest.COPY_SURVEY.equalsIgnoreCase(dpReq
.getAction())) {
copySurvey(dpReq.getSurveyId(), Long.valueOf(dpReq.getSource()));
} else if (DataProcessorRequest.IMPORT_REMOTE_SURVEY_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
SurveyReplicationImporter sri = new SurveyReplicationImporter();
sri.executeImport(dpReq.getSource(), dpReq.getSurveyId(), dpReq.getApiKey());
} else if (DataProcessorRequest.RESCORE_AP_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
rescoreAp(dpReq.getCountry());
} else if (DataProcessorRequest.FIX_NULL_SUBMITTER_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixNullSubmitter();
} else if (DataProcessorRequest.FIX_DUPLICATE_OTHER_TEXT_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixDuplicateOtherText();
} else if (DataProcessorRequest.TRIM_OPTIONS.equalsIgnoreCase(dpReq
.getAction())) {
trimOptions();
} else if (DataProcessorRequest.FIX_OPTIONS2VALUES_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixOptions2Values();
} else if (DataProcessorRequest.SURVEY_INSTANCE_SUMMARIZER
.equalsIgnoreCase(dpReq.getAction())) {
surveyInstanceSummarizer(dpReq.getSurveyInstanceId(),
dpReq.getQasId(), dpReq.getDelta());
} else if (DataProcessorRequest.DELETE_DUPLICATE_QAS
.equalsIgnoreCase(dpReq.getAction())) {
deleteDuplicatedQAS(dpReq.getOffset());
} else if (DataProcessorRequest.CHANGE_LOCALE_TYPE_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
changeLocaleType(dpReq.getSurveyId());
}
return new RestResponse();
}
@Override
protected void writeOkResponse(RestResponse resp) throws Exception {
getResponse().setStatus(200);
}
/**
* lists all QuestionOptions and trims trailing/leading spaces. Then does
* the same for any dependencies
*/
private void trimOptions() {
QuestionOptionDao optDao = new QuestionOptionDao();
QuestionDao qDao = new QuestionDao();
String cursor = null;
do {
List<QuestionOption> optList = optDao.list(cursor);
if (optList != null && optList.size() > 0) {
for (QuestionOption opt : optList) {
if (opt.getText() != null) {
opt.setText(opt.getText().trim());
}
List<Question> qList = qDao.listQuestionsByDependency(opt
.getQuestionId());
for (Question q : qList) {
if (q.getText() != null) {
q.setText(q.getText().trim());
}
if (q.getDependentQuestionAnswer() != null) {
q.setDependentQuestionAnswer(q
.getDependentQuestionAnswer().trim());
}
}
}
if (optList.size() == QuestionOptionDao.DEFAULT_RESULT_COUNT) {
cursor = QuestionOptionDao.getCursor(optList);
} else {
cursor = null;
}
} else {
cursor = null;
}
} while (cursor != null);
}
/**
* lists all "OTHER" type answers and checks if the last tokens are
* duplicates. Fixes if they are.
*/
private void fixDuplicateOtherText() {
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
int pageSize = 300;
String cursor = null;
do {
List<QuestionAnswerStore> answers = qasDao.listByTypeAndDate(
"OTHER", null, null, cursor, pageSize);
if (answers != null) {
for (QuestionAnswerStore ans : answers) {
if (ans.getValue() != null && ans.getValue().contains("|")) {
String[] tokens = ans.getValue().split("\\|");
String lastVal = null;
boolean droppedVal = false;
StringBuilder buf = new StringBuilder();
for (int i = 0; i < tokens.length; i++) {
if (!tokens[i].equals(lastVal)) {
lastVal = tokens[i];
if (i > 0) {
buf.append("|");
}
buf.append(lastVal);
} else {
droppedVal = true;
}
}
if (droppedVal) {
// only dirty the object if needed
ans.setValue(buf.toString());
}
}
}
if (answers.size() == pageSize) {
cursor = QuestionAnswerStoreDao.getCursor(answers);
} else {
cursor = null;
}
}
} while (cursor != null);
}
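/*
* Worked example of the repair above (answer values invented for illustration): an
* OTHER answer stored as "spring|spring|borehole" is rewritten to "spring|borehole",
* while "spring|borehole|spring" is left untouched, because only tokens that repeat
* their immediate predecessor are dropped and the entity is only dirtied when a
* token was actually removed.
*/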
/**
* changes the surveyedLocales attached to a survey to a different type
* 1 = Point
* 2 = Household
* 3 = Public Institutions
*/
private void changeLocaleType(Long surveyId) {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
SurveyedLocaleDao slDao = new SurveyedLocaleDao();
SurveyDAO sDao = new SurveyDAO();
String cursor = null;
// get the desired type from the survey definition
Survey s = sDao.getByKey(surveyId);
if (s != null && s.getPointType() != null && s.getPointType().length() > 0){
String localeType = s.getPointType();
do {
List<SurveyInstance> siList = siDao.listSurveyInstanceBySurvey(surveyId, QAS_PAGE_SIZE, cursor);
List<SurveyedLocale> slList = new ArrayList<SurveyedLocale>();
if (siList != null && siList.size() > 0) {
for (SurveyInstance si : siList) {
if (si.getSurveyedLocaleId() != null) {
SurveyedLocale sl = slDao.getByKey(si.getSurveyedLocaleId());
if (sl != null){
// if the locale type is not set or if it is not equal to the survey setting,
// reset the local type
if (sl.getLocaleType() == null || !sl.getLocaleType().equals(localeType)) {
sl.setLocaleType(localeType);
slList.add(sl);
}
}
}
}
slDao.save(slList);
if (siList.size() == QAS_PAGE_SIZE) {
cursor = SurveyInstanceDAO.getCursor(siList);
} else {
cursor = null;
}
}
} while (cursor != null);
}
}
private void fixNullSubmitter() {
SurveyInstanceDAO instDao = new SurveyInstanceDAO();
List<SurveyInstance> instances = instDao.listInstanceBySubmitter(null);
if (instances != null) {
DeviceFilesDao dfDao = new DeviceFilesDao();
for (SurveyInstance inst : instances) {
DeviceFiles f = dfDao.findByInstance(inst.getKey().getId());
if (f != null) {
try {
URL url = new URL(f.getURI());
BufferedInputStream bis = new BufferedInputStream(
url.openStream());
ZipInputStream zis = new ZipInputStream(bis);
ArrayList<String> lines = TaskServlet
.extractDataFromZip(zis);
zis.close();
if (lines != null) {
for (String line : lines) {
String[] parts = line.split("\t");
if (parts.length > 5) {
if (parts[5] != null
&& parts[5].trim().length() > 0) {
inst.setSubmitterName(parts[5]);
break;
}
}
}
}
} catch (Exception e) {
log("Could not download zip: " + f.getURI());
}
}
}
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private void deleteDuplicatedQAS(Long offset) {
log.log(Level.INFO, "Searching for duplicated QAS entities [Offset: "
+ offset + "]");
Cache cache = null;
Map props = new HashMap();
props.put(GCacheFactory.EXPIRATION_DELTA, 12 * 60 * 60);
props.put(MemcacheService.SetPolicy.SET_ALWAYS, true);
try {
CacheFactory cacheFactory = CacheManager.getInstance()
.getCacheFactory();
cache = cacheFactory.createCache(props);
} catch (Exception e) {
log.log(Level.SEVERE,
"Couldn't initialize cache: " + e.getMessage(), e);
}
if (cache == null) {
return;
}
final PersistenceManager pm = PersistenceFilter.getManager();
final Query q = pm.newQuery(QuestionAnswerStore.class);
q.setOrdering("createdDateTime asc");
q.setRange(offset, offset + QAS_PAGE_SIZE);
final List<QuestionAnswerStore> results = (List<QuestionAnswerStore>) q
.execute();
List<QuestionAnswerStore> toRemove;
if (cache.containsKey(QAS_TO_REMOVE)) {
toRemove = (List<QuestionAnswerStore>) cache.get(QAS_TO_REMOVE);
} else {
toRemove = new ArrayList<QuestionAnswerStore>();
}
for (QuestionAnswerStore item : results) {
final Long questionID = Long.valueOf(item.getQuestionID());
final Long surveyInstanceId = item.getSurveyInstanceId();
final Map<Long, Long> k = new HashMap<Long, Long>();
k.put(surveyInstanceId, questionID);
if (cache.containsKey(k)) {
toRemove.add(item);
}
cache.put(k, true);
}
if (results.size() == QAS_PAGE_SIZE) {
cache.put(QAS_TO_REMOVE, toRemove);
final TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.DELETE_DUPLICATE_QAS)
.param(DataProcessorRequest.OFFSET_PARAM,
String.valueOf(offset + QAS_PAGE_SIZE))
.header("Host",
BackendServiceFactory.getBackendService()
.getBackendAddress("dataprocessor"));
Queue queue = QueueFactory.getDefaultQueue();
queue.add(options);
} else {
log.log(Level.INFO, "Removing " + toRemove.size()
+ " duplicated QAS entities");
QuestionAnswerStoreDao dao = new QuestionAnswerStoreDao();
pm.makePersistentAll(toRemove); // some objects are in "transient" state
dao.delete(toRemove);
}
}
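/*
* The duplicate detection above keys memcache on a single-entry map of
* {surveyInstanceId : questionID}. For example (ids invented), the first
* QuestionAnswerStore seen for {8001 : 42} marks that pair in the cache; any later
* QAS carrying the same pair within the 12 hour cache window is appended to the
* QAStoRemove list, which is deleted in one batch once the final page has been scanned.
*/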
/**
* this method re-runs scoring on all access points for a country
*
* @param country
*/
private void rescoreAp(String country) {
AccessPointDao apDao = new AccessPointDao();
String cursor = null;
List<AccessPoint> apList = null;
do {
apList = apDao.listAccessPointByLocation(country, null, null, null,
cursor, 200);
if (apList != null) {
cursor = AccessPointDao.getCursor(apList);
for (AccessPoint ap : apList) {
apDao.save(ap);
}
}
} while (apList != null && apList.size() == 200);
}
private void copySurvey(Long surveyId, Long sourceId) {
final QuestionGroupDao qgDao = new QuestionGroupDao();
final Map<Long, Long> qMap = new HashMap<Long, Long>();
final List<QuestionGroup> qgList = qgDao.listQuestionGroupBySurvey(sourceId);
if (qgList == null) {
log.log(Level.INFO, "Nothing to copy from {surveyId: " + sourceId
+ "} to {surveyId: " + surveyId + "}");
SurveyUtils.resetSurveyState(surveyId);
return;
}
log.log(Level.INFO, "Copying " + qgList.size() + " `QuestionGroup`");
int qgOrder = 1;
for (final QuestionGroup sourceQG : qgList) {
SurveyUtils.copyQuestionGroup(sourceQG, surveyId, qgOrder++, qMap);
}
SurveyUtils.resetSurveyState(surveyId);
MessageDao mDao = new MessageDao();
Message message = new Message();
message.setObjectId(surveyId);
message.setActionAbout("copySurvey");
message.setShortMessage("Copy from Survey " + sourceId + " to Survey " + surveyId + " completed");
mDao.save(message);
}
/**
* rebuilds the SurveyQuestionSummary objects for every survey in the system, or for a
* single survey when a surveyId is supplied. This method should only be run on a
* Backend instance as it is unlikely to complete within the task duration limits on
* other instances.
*/
private void rebuildQuestionSummary(Long surveyId) {
ProcessingStatusDao statusDao = new ProcessingStatusDao();
List<Long> surveyIds = new ArrayList<Long>();
if (surveyId == null) {
SurveyDAO surveyDao = new SurveyDAO();
List<Survey> surveys = surveyDao.list(Constants.ALL_RESULTS);
if (surveys != null) {
for (Survey s : surveys) {
surveyIds.add(s.getKey().getId());
}
}
} else {
surveyIds.add(surveyId);
}
for (Long sid : surveyIds) {
ProcessingStatus status = statusDao
.getStatusByCode(REBUILD_Q_SUM_STATUS_KEY
+ (sid != null ? ":" + sid : ""));
Map<String, Map<String, Long>> summaryMap = summarizeQuestionAnswerStore(
sid, null);
if (summaryMap != null) {
saveSummaries(summaryMap);
}
// now update the status so we can know it last ran
if (status == null) {
status = new ProcessingStatus();
status.setCode(REBUILD_Q_SUM_STATUS_KEY
+ (sid != null ? ":" + sid : ""));
}
status.setInError(false);
status.setLastEventDate(new Date());
statusDao.save(status);
}
}
/**
* iterates over the new summary counts and updates the records in the
* datastore. Where appropriate, new records will be created and defunct
* records will be removed.
*
* @param summaryMap
*/
private void saveSummaries(Map<String, Map<String, Long>> summaryMap) {
SurveyQuestionSummaryDao summaryDao = new SurveyQuestionSummaryDao();
for (Entry<String, Map<String, Long>> summaryEntry : summaryMap
.entrySet()) {
List<SurveyQuestionSummary> summaryList = summaryDao
.listByQuestion(summaryEntry.getKey());
// iterate over all the counts and update the summaryList with the
// count values. Create any missing elements and remove defunct
// entries as we go
List<SurveyQuestionSummary> toDeleteList = new ArrayList<SurveyQuestionSummary>(
summaryList);
List<SurveyQuestionSummary> toCreateList = new ArrayList<SurveyQuestionSummary>();
for (Entry<String, Long> valueEntry : summaryEntry.getValue()
.entrySet()) {
String val = valueEntry.getKey();
boolean found = false;
for (SurveyQuestionSummary sum : summaryList) {
if (sum.getResponse() != null
&& sum.getResponse().equals(val)) {
// since it's still valid, remove it from toDeleteList
toDeleteList.remove(sum);
// update the count. Since we still have the
// persistenceContext open, this will automatically be
// flushed to the datastore without an explicit call to
// save
sum.setCount(valueEntry.getValue());
found = true;
}
}
if (!found) {
// need to create it
SurveyQuestionSummary s = new SurveyQuestionSummary();
s.setCount(valueEntry.getValue());
s.setQuestionId(summaryEntry.getKey());
s.setResponse(val);
toCreateList.add(s);
}
}
// delete the unseen entities
if (toDeleteList.size() > 0) {
summaryDao.delete(toDeleteList);
}
// save the new items
if (toCreateList.size() > 0) {
summaryDao.save(toCreateList);
}
// flush the datastore operation
summaryDao.flushBatch();
}
}
/**
* loads all the summarizable QuestionAnswerStore instances from the data
* store and accrues counts by value occurrence in a map keyed on the
* questionId
*
* @param surveyId
* @param sinceDate
* @return a map of response counts keyed on questionId and then on response value
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private Map<String, Map<String, Long>> summarizeQuestionAnswerStore(
Long surveyId, Date sinceDate) {
final QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
final QuestionDao questionDao = new QuestionDao();
final List<Question> qList = questionDao.listQuestionByType(surveyId,
Question.Type.OPTION);
Cache cache = null;
Map props = new HashMap();
props.put(GCacheFactory.EXPIRATION_DELTA, 60 * 60 * 2); // 2h
props.put(MemcacheService.SetPolicy.SET_ALWAYS, true);
try {
CacheFactory cacheFactory = CacheManager.getInstance()
.getCacheFactory();
cache = cacheFactory.createCache(props);
} catch (Exception e) {
log.log(Level.SEVERE,
"Couldn't initialize cache: " + e.getMessage(), e);
}
String cursor = null;
final Map<String, Map<String, Long>> summaryMap = new HashMap<String, Map<String, Long>>();
for (Question q : qList) {
List<QuestionAnswerStore> qasList = qasDao.listByQuestion(q.getKey().getId(), cursor, QAS_PAGE_SIZE);
if (qasList == null || qasList.size() == 0) {
continue; // skip
}
do {
cursor = QuestionAnswerStoreDao.getCursor(qasList);
for(QuestionAnswerStore qas : qasList) {
if (cache != null) {
Map<Long, String> answer = new HashMap<Long, String>();
answer.put(qas.getSurveyInstanceId(), qas.getQuestionID());
if (cache.containsKey(answer)) {
log.log(Level.INFO, "Found duplicated QAS {surveyInstanceId: " + qas.getSurveyInstanceId() +" , questionID: " + qas.getQuestionID() +"}");
continue;
}
cache.put(answer, true);
}
String val = qas.getValue();
Map<String, Long> countMap = summaryMap.get(qas
.getQuestionID());
if (countMap == null) {
countMap = new HashMap<String, Long>();
summaryMap.put(qas.getQuestionID(), countMap);
}
// split up multiple answers
String[] answers;
if (val != null && val.contains("|")) {
answers = val.split("\\|");
} else {
answers = new String[] { val };
}
// perform count
for (int i = 0; i < answers.length; i++) {
Long count = countMap.get(answers[i]);
if (count == null) {
count = 1L;
} else {
count = count + 1;
}
countMap.put(answers[i], count);
}
}
qasList = qasDao.listByQuestion(q.getKey().getId(), cursor, QAS_PAGE_SIZE);
} while (qasList != null && qasList.size() > 0);
cursor = null;
}
return summaryMap;
}
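/*
* Shape of the returned map, with question ids and option values invented for
* illustration:
*
* { "1234" : { "Yes" : 10, "No" : 3 },
*   "5678" : { "Spring" : 7, "Borehole" : 2 } }
*
* Pipe-separated multi-select answers such as "Spring|Borehole" are split first, so
* each selected option contributes one count to its question's map.
*/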
/**
* iterates over all AccessPoints in a country and applies a static set of
* rules to determine the proper value of the WFPProjectFlag
*
* @param country
* @param cursor
*/
private void updateAccessPointProjectFlag(String country, String cursor) {
AccessPointDao apDao = new AccessPointDao();
Integer pageSize = 200;
List<AccessPoint> apList = apDao.listAccessPointByLocation(country,
null, null, null, cursor, pageSize);
if (apList != null) {
for (AccessPoint ap : apList) {
if ("PE".equalsIgnoreCase(ap.getCountryCode())) {
ap.setWaterForPeopleProjectFlag(false);
} else if ("RW".equalsIgnoreCase(ap.getCountryCode())) {
ap.setWaterForPeopleProjectFlag(false);
} else if ("MW".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getCommunityName().trim()
.equalsIgnoreCase("Kachere/Makhetha/Nkolokoti")) {
ap.setCommunityName("Kachere/Makhetha/Nkolokoti");
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(true);
}
} else if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(false);
}
} else if ("HN".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getCommunityCode().startsWith("IL")) {
ap.setWaterForPeopleProjectFlag(false);
} else {
ap.setWaterForPeopleProjectFlag(true);
}
} else if ("IN".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(true);
}
} else if ("GT".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterAvailableDayVisitFlag(true);
}
} else {
// handles BO, DO, SV
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(false);
}
}
}
if (apList.size() == pageSize) {
// check for more
sendProjectUpdateTask(country, AccessPointDao.getCursor(apList));
}
}
}
/**
* Sends a message to a task queue to start or continue the processing of
* the AP Project Flag
*
* @param country
* @param cursor
*/
public static void sendProjectUpdateTask(String country, String cursor) {
TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.PROJECT_FLAG_UPDATE_ACTION)
.param(DataProcessorRequest.COUNTRY_PARAM, country)
.param(DataProcessorRequest.CURSOR_PARAM,
cursor != null ? cursor : "");
Queue queue = QueueFactory.getDefaultQueue();
queue.add(options);
}
/**
* fixes wrong types in questionAnswerStore objects. When cleaned data is
* uploaded using an excel file, the type of the answer is set according to
* the type of the question, while the device sets the type according to a
* different convention. The action handles QAS_PAGE_SIZE items in one call, and
* invokes new tasks as necessary if there are more items.
*
* @author M.T. Westra
*/
public static void fixOptions2Values() {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
List<QuestionAnswerStore> qasList = siDao.listQAOptions(null,
QAS_PAGE_SIZE, "OPTION", "FREE_TEXT", "NUMBER", "SCAN", "PHOTO");
List<QuestionAnswerStore> qasChangedList = new ArrayList<QuestionAnswerStore>();
log.log(Level.INFO, "Running fixOptions2Values");
if (qasList != null) {
for (QuestionAnswerStore qas : qasList) {
if (Question.Type.OPTION.toString().equals(qas.getType())
|| Question.Type.NUMBER.toString()
.equals(qas.getType())
|| Question.Type.FREE_TEXT.toString().equals(
qas.getType())
|| Question.Type.SCAN.toString().equals(qas.getType())) {
qas.setType("VALUE");
qasChangedList.add(qas);
} else if (Question.Type.PHOTO.toString().equals(qas.getType())) {
qas.setType("IMAGE");
qasChangedList.add(qas);
}
}
qasDao.save(qasChangedList);
// if there are more, invoke another task
if (qasList.size() == QAS_PAGE_SIZE) {
log.log(Level.INFO, "invoking another fixOptions task");
Queue queue = QueueFactory.getDefaultQueue();
TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.FIX_OPTIONS2VALUES_ACTION);
queue.add(options);
}
}
}
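/*
* Net effect of the rewrite above: question-type names written by the cleaned-data
* (Excel) import are mapped onto the device convention, i.e.
*
* OPTION, FREE_TEXT, NUMBER, SCAN -> VALUE
* PHOTO -> IMAGE
*
* and any remaining pages are handled by re-queueing the same dataprocessor action.
*/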
public static void surveyInstanceSummarizer(Long surveyInstanceId,
Long qasId, Integer delta) {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
boolean success = false;
if (surveyInstanceId != null) {
SurveyInstance si = siDao.getByKey(surveyInstanceId);
if (si != null && qasId != null) {
QuestionAnswerStore qas = qasDao.getByKey(qasId);
if (qas != null) {
GeoCoordinates geoC = null;
if (qas.getValue() != null
&& qas.getValue().trim().length() > 0) {
geoC = GeoCoordinates.extractGeoCoordinate(qas
.getValue());
}
if (geoC != null) {
GeoLocationService gisService = new GeoLocationServiceGeonamesImpl();
GeoPlace gp = gisService.findDetailedGeoPlace(geoC
.getLatitude().toString(), geoC.getLongitude()
.toString());
if (gp != null) {
SurveyInstanceSummaryDao.incrementCount(
gp.getSub1(), gp.getCountryCode(),
qas.getCollectionDate(), delta.intValue());
success = true;
}
}
}
}
}
if (!success) {
log.log(Level.SEVERE,
"Couldnt find geoplace for instance. Instance id: "
+ surveyInstanceId);
}
}
}
| GAE/src/org/waterforpeople/mapping/app/web/DataProcessorRestServlet.java | /*
* Copyright (C) 2010-2013 Stichting Akvo (Akvo Foundation)
*
* This file is part of Akvo FLOW.
*
* Akvo FLOW is free software: you can redistribute it and modify it under the terms of
* the GNU Affero General Public License (AGPL) as published by the Free Software Foundation,
* either version 3 of the License or any later version.
*
* Akvo FLOW is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License included below for more details.
*
* The full license text can also be seen at <http://www.gnu.org/licenses/agpl.html>.
*/
package org.waterforpeople.mapping.app.web;
import java.io.BufferedInputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipInputStream;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.servlet.http.HttpServletRequest;
import net.sf.jsr107cache.Cache;
import net.sf.jsr107cache.CacheFactory;
import net.sf.jsr107cache.CacheManager;
import org.waterforpeople.mapping.analytics.dao.SurveyInstanceSummaryDao;
import org.waterforpeople.mapping.analytics.dao.SurveyQuestionSummaryDao;
import org.waterforpeople.mapping.analytics.domain.SurveyQuestionSummary;
import org.waterforpeople.mapping.app.web.dto.DataProcessorRequest;
import org.waterforpeople.mapping.dao.AccessPointDao;
import org.waterforpeople.mapping.dao.DeviceFilesDao;
import org.waterforpeople.mapping.dao.QuestionAnswerStoreDao;
import org.waterforpeople.mapping.dao.SurveyInstanceDAO;
import org.waterforpeople.mapping.dataexport.SurveyReplicationImporter;
import org.waterforpeople.mapping.domain.AccessPoint;
import org.waterforpeople.mapping.domain.GeoCoordinates;
import org.waterforpeople.mapping.domain.QuestionAnswerStore;
import org.waterforpeople.mapping.domain.SurveyInstance;
import com.gallatinsystems.common.Constants;
import com.gallatinsystems.device.domain.DeviceFiles;
import com.gallatinsystems.framework.rest.AbstractRestApiServlet;
import com.gallatinsystems.framework.rest.RestRequest;
import com.gallatinsystems.framework.rest.RestResponse;
import com.gallatinsystems.framework.servlet.PersistenceFilter;
import com.gallatinsystems.gis.location.GeoLocationService;
import com.gallatinsystems.gis.location.GeoLocationServiceGeonamesImpl;
import com.gallatinsystems.gis.location.GeoPlace;
import com.gallatinsystems.messaging.dao.MessageDao;
import com.gallatinsystems.messaging.domain.Message;
import com.gallatinsystems.operations.dao.ProcessingStatusDao;
import com.gallatinsystems.operations.domain.ProcessingStatus;
import com.gallatinsystems.survey.dao.QuestionDao;
import com.gallatinsystems.survey.dao.QuestionGroupDao;
import com.gallatinsystems.survey.dao.QuestionOptionDao;
import com.gallatinsystems.survey.dao.SurveyDAO;
import com.gallatinsystems.survey.dao.SurveyUtils;
import com.gallatinsystems.survey.domain.Question;
import com.gallatinsystems.survey.domain.QuestionGroup;
import com.gallatinsystems.survey.domain.QuestionOption;
import com.gallatinsystems.survey.domain.Survey;
import com.gallatinsystems.surveyal.dao.SurveyedLocaleDao;
import com.gallatinsystems.surveyal.domain.SurveyedLocale;
import com.google.appengine.api.backends.BackendServiceFactory;
import com.google.appengine.api.memcache.MemcacheService;
import com.google.appengine.api.memcache.stdimpl.GCacheFactory;
import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskOptions;
/**
* Restful servlet to do bulk data update operations
*
* @author Christopher Fagiani
*
*/
public class DataProcessorRestServlet extends AbstractRestApiServlet {
private static final Logger log = Logger
.getLogger("DataProcessorRestServlet");
private static final long serialVersionUID = -7902002525342262821L;
private static final String REBUILD_Q_SUM_STATUS_KEY = "rebuildQuestionSummary";
private static final Integer QAS_PAGE_SIZE = 300;
private static final String QAS_TO_REMOVE = "QAStoRemove";
@Override
protected RestRequest convertRequest() throws Exception {
HttpServletRequest req = getRequest();
RestRequest restRequest = new DataProcessorRequest();
restRequest.populateFromHttpRequest(req);
return restRequest;
}
@Override
protected RestResponse handleRequest(RestRequest req) throws Exception {
DataProcessorRequest dpReq = (DataProcessorRequest) req;
if (DataProcessorRequest.PROJECT_FLAG_UPDATE_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
updateAccessPointProjectFlag(dpReq.getCountry(), dpReq.getCursor());
} else if (DataProcessorRequest.REBUILD_QUESTION_SUMMARY_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
rebuildQuestionSummary(dpReq.getSurveyId());
} else if (DataProcessorRequest.COPY_SURVEY.equalsIgnoreCase(dpReq
.getAction())) {
copySurvey(dpReq.getSurveyId(), Long.valueOf(dpReq.getSource()));
} else if (DataProcessorRequest.IMPORT_REMOTE_SURVEY_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
SurveyReplicationImporter sri = new SurveyReplicationImporter();
sri.executeImport(dpReq.getSource(), dpReq.getSurveyId(), dpReq.getApiKey());
} else if (DataProcessorRequest.RESCORE_AP_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
rescoreAp(dpReq.getCountry());
} else if (DataProcessorRequest.FIX_NULL_SUBMITTER_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixNullSubmitter();
} else if (DataProcessorRequest.FIX_DUPLICATE_OTHER_TEXT_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixDuplicateOtherText();
} else if (DataProcessorRequest.TRIM_OPTIONS.equalsIgnoreCase(dpReq
.getAction())) {
trimOptions();
} else if (DataProcessorRequest.FIX_OPTIONS2VALUES_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
fixOptions2Values();
} else if (DataProcessorRequest.SURVEY_INSTANCE_SUMMARIZER
.equalsIgnoreCase(dpReq.getAction())) {
surveyInstanceSummarizer(dpReq.getSurveyInstanceId(),
dpReq.getQasId(), dpReq.getDelta());
} else if (DataProcessorRequest.DELETE_DUPLICATE_QAS
.equalsIgnoreCase(dpReq.getAction())) {
deleteDuplicatedQAS(dpReq.getOffset());
} else if (DataProcessorRequest.CHANGE_LOCALE_TYPE_ACTION
.equalsIgnoreCase(dpReq.getAction())) {
changeLocaleType(dpReq.getSurveyId());
}
return new RestResponse();
}
@Override
protected void writeOkResponse(RestResponse resp) throws Exception {
getResponse().setStatus(200);
}
/**
* lists all QuestionOptions and trims trailing/leading spaces. Then does
* the same for any dependencies
*/
private void trimOptions() {
QuestionOptionDao optDao = new QuestionOptionDao();
QuestionDao qDao = new QuestionDao();
String cursor = null;
do {
List<QuestionOption> optList = optDao.list(cursor);
if (optList != null && optList.size() > 0) {
for (QuestionOption opt : optList) {
if (opt.getText() != null) {
opt.setText(opt.getText().trim());
}
List<Question> qList = qDao.listQuestionsByDependency(opt
.getQuestionId());
for (Question q : qList) {
if (q.getText() != null) {
q.setText(q.getText().trim());
}
if (q.getDependentQuestionAnswer() != null) {
q.setDependentQuestionAnswer(q
.getDependentQuestionAnswer().trim());
}
}
}
if (optList.size() == QuestionOptionDao.DEFAULT_RESULT_COUNT) {
cursor = QuestionOptionDao.getCursor(optList);
} else {
cursor = null;
}
} else {
cursor = null;
}
} while (cursor != null);
}
/**
* lists all "OTHER" type answers and checks if the last tokens are
* duplicates. Fixes if they are.
*/
private void fixDuplicateOtherText() {
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
int pageSize = 300;
String cursor = null;
do {
List<QuestionAnswerStore> answers = qasDao.listByTypeAndDate(
"OTHER", null, null, cursor, pageSize);
if (answers != null) {
for (QuestionAnswerStore ans : answers) {
if (ans.getValue() != null && ans.getValue().contains("|")) {
String[] tokens = ans.getValue().split("\\|");
String lastVal = null;
boolean droppedVal = false;
StringBuilder buf = new StringBuilder();
for (int i = 0; i < tokens.length; i++) {
if (!tokens[i].equals(lastVal)) {
lastVal = tokens[i];
if (i > 0) {
buf.append("|");
}
buf.append(lastVal);
} else {
droppedVal = true;
}
}
if (droppedVal) {
// only dirty the object if needed
ans.setValue(buf.toString());
}
}
}
if (answers.size() == pageSize) {
cursor = QuestionAnswerStoreDao.getCursor(answers);
} else {
cursor = null;
}
}
} while (cursor != null);
}
/**
* changes the surveyedLocales attached to a survey to a different type
* 1 = Point
* 2 = Household
* 3 = Public Institutions
*/
private void changeLocaleType(Long surveyId) {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
SurveyedLocaleDao slDao = new SurveyedLocaleDao();
SurveyDAO sDao = new SurveyDAO();
String cursor = null;
// get the desired type from the survey definition
Survey s = sDao.getByKey(surveyId);
if (s != null && s.getPointType() != null && s.getPointType().length() > 0){
String localeType = s.getPointType();
do {
List<SurveyInstance> siList = siDao.listSurveyInstanceBySurvey(surveyId, QAS_PAGE_SIZE, cursor);
List<SurveyedLocale> slList = new ArrayList<SurveyedLocale>();
if (siList != null && siList.size() > 0) {
for (SurveyInstance si : siList) {
if (si.getSurveyedLocaleId() != null) {
SurveyedLocale sl = slDao.getByKey(si.getSurveyedLocaleId());
if (sl != null && !sl.getLocaleType().equals(localeType)) {
sl.setLocaleType(localeType);
slList.add(sl);
}
}
}
slDao.save(slList);
if (siList.size() == QAS_PAGE_SIZE) {
cursor = SurveyInstanceDAO.getCursor(siList);
} else {
cursor = null;
}
}
} while (cursor != null);
}
}
private void fixNullSubmitter() {
SurveyInstanceDAO instDao = new SurveyInstanceDAO();
List<SurveyInstance> instances = instDao.listInstanceBySubmitter(null);
if (instances != null) {
DeviceFilesDao dfDao = new DeviceFilesDao();
for (SurveyInstance inst : instances) {
DeviceFiles f = dfDao.findByInstance(inst.getKey().getId());
if (f != null) {
try {
URL url = new URL(f.getURI());
BufferedInputStream bis = new BufferedInputStream(
url.openStream());
ZipInputStream zis = new ZipInputStream(bis);
ArrayList<String> lines = TaskServlet
.extractDataFromZip(zis);
zis.close();
if (lines != null) {
for (String line : lines) {
String[] parts = line.split("\t");
if (parts.length > 5) {
if (parts[5] != null
&& parts[5].trim().length() > 0) {
inst.setSubmitterName(parts[5]);
break;
}
}
}
}
} catch (Exception e) {
log("Could not download zip: " + f.getURI());
}
}
}
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
private void deleteDuplicatedQAS(Long offset) {
log.log(Level.INFO, "Searching for duplicated QAS entities [Offset: "
+ offset + "]");
Cache cache = null;
Map props = new HashMap();
props.put(GCacheFactory.EXPIRATION_DELTA, 12 * 60 * 60);
props.put(MemcacheService.SetPolicy.SET_ALWAYS, true);
try {
CacheFactory cacheFactory = CacheManager.getInstance()
.getCacheFactory();
cache = cacheFactory.createCache(props);
} catch (Exception e) {
log.log(Level.SEVERE,
"Couldn't initialize cache: " + e.getMessage(), e);
}
if (cache == null) {
return;
}
final PersistenceManager pm = PersistenceFilter.getManager();
final Query q = pm.newQuery(QuestionAnswerStore.class);
q.setOrdering("createdDateTime asc");
q.setRange(offset, offset + QAS_PAGE_SIZE);
final List<QuestionAnswerStore> results = (List<QuestionAnswerStore>) q
.execute();
List<QuestionAnswerStore> toRemove;
if (cache.containsKey(QAS_TO_REMOVE)) {
toRemove = (List<QuestionAnswerStore>) cache.get(QAS_TO_REMOVE);
} else {
toRemove = new ArrayList<QuestionAnswerStore>();
}
for (QuestionAnswerStore item : results) {
final Long questionID = Long.valueOf(item.getQuestionID());
final Long surveyInstanceId = item.getSurveyInstanceId();
final Map<Long, Long> k = new HashMap<Long, Long>();
k.put(surveyInstanceId, questionID);
if (cache.containsKey(k)) {
toRemove.add(item);
}
cache.put(k, true);
}
if (results.size() == QAS_PAGE_SIZE) {
cache.put(QAS_TO_REMOVE, toRemove);
final TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.DELETE_DUPLICATE_QAS)
.param(DataProcessorRequest.OFFSET_PARAM,
String.valueOf(offset + QAS_PAGE_SIZE))
.header("Host",
BackendServiceFactory.getBackendService()
.getBackendAddress("dataprocessor"));
Queue queue = QueueFactory.getDefaultQueue();
queue.add(options);
} else {
log.log(Level.INFO, "Removing " + toRemove.size()
+ " duplicated QAS entities");
QuestionAnswerStoreDao dao = new QuestionAnswerStoreDao();
pm.makePersistentAll(toRemove); // some objects are in "transient" state
dao.delete(toRemove);
}
}
/**
* this method re-runs scoring on all access points for a country
*
* @param country
*/
private void rescoreAp(String country) {
AccessPointDao apDao = new AccessPointDao();
String cursor = null;
List<AccessPoint> apList = null;
do {
apList = apDao.listAccessPointByLocation(country, null, null, null,
cursor, 200);
if (apList != null) {
cursor = AccessPointDao.getCursor(apList);
for (AccessPoint ap : apList) {
apDao.save(ap);
}
}
} while (apList != null && apList.size() == 200);
}
private void copySurvey(Long surveyId, Long sourceId) {
final QuestionGroupDao qgDao = new QuestionGroupDao();
final Map<Long, Long> qMap = new HashMap<Long, Long>();
final List<QuestionGroup> qgList = qgDao.listQuestionGroupBySurvey(sourceId);
if (qgList == null) {
log.log(Level.INFO, "Nothing to copy from {surveyId: " + sourceId
+ "} to {surveyId: " + surveyId + "}");
SurveyUtils.resetSurveyState(surveyId);
return;
}
log.log(Level.INFO, "Copying " + qgList.size() + " `QuestionGroup`");
int qgOrder = 1;
for (final QuestionGroup sourceQG : qgList) {
SurveyUtils.copyQuestionGroup(sourceQG, surveyId, qgOrder++, qMap);
}
SurveyUtils.resetSurveyState(surveyId);
MessageDao mDao = new MessageDao();
Message message = new Message();
message.setObjectId(surveyId);
message.setActionAbout("copySurvey");
message.setShortMessage("Copy from Survey " + sourceId + " to Survey " + surveyId + " completed");
mDao.save(message);
}
/**
* rebuilds the SurveyQuestionSummary object for ALL data in the system.
* This method should only be run on a Backend instance as it is unlikely to
* complete within the task duration limits on other instances.
*/
private void rebuildQuestionSummary(Long surveyId) {
ProcessingStatusDao statusDao = new ProcessingStatusDao();
List<Long> surveyIds = new ArrayList<Long>();
if (surveyId == null) {
SurveyDAO surveyDao = new SurveyDAO();
List<Survey> surveys = surveyDao.list(Constants.ALL_RESULTS);
if (surveys != null) {
for (Survey s : surveys) {
surveyIds.add(s.getKey().getId());
}
}
} else {
surveyIds.add(surveyId);
}
for (Long sid : surveyIds) {
ProcessingStatus status = statusDao
.getStatusByCode(REBUILD_Q_SUM_STATUS_KEY
+ (sid != null ? ":" + sid : ""));
Map<String, Map<String, Long>> summaryMap = summarizeQuestionAnswerStore(
sid, null);
if (summaryMap != null) {
saveSummaries(summaryMap);
}
// now update the status so we can know it last ran
if (status == null) {
status = new ProcessingStatus();
status.setCode(REBUILD_Q_SUM_STATUS_KEY
+ (sid != null ? ":" + sid : ""));
}
status.setInError(false);
status.setLastEventDate(new Date());
statusDao.save(status);
}
}
/**
* iterates over the new summary counts and updates the records in the
* datastore. Where appropriate, new records will be created and defunct
* records will be removed.
*
* @param summaryMap
*/
private void saveSummaries(Map<String, Map<String, Long>> summaryMap) {
SurveyQuestionSummaryDao summaryDao = new SurveyQuestionSummaryDao();
for (Entry<String, Map<String, Long>> summaryEntry : summaryMap
.entrySet()) {
List<SurveyQuestionSummary> summaryList = summaryDao
.listByQuestion(summaryEntry.getKey());
// iterate over all the counts and update the summaryList with the
// count values. Create any missing elements and remove defunct
// entries as we go
List<SurveyQuestionSummary> toDeleteList = new ArrayList<SurveyQuestionSummary>(
summaryList);
List<SurveyQuestionSummary> toCreateList = new ArrayList<SurveyQuestionSummary>();
for (Entry<String, Long> valueEntry : summaryEntry.getValue()
.entrySet()) {
String val = valueEntry.getKey();
boolean found = false;
for (SurveyQuestionSummary sum : summaryList) {
if (sum.getResponse() != null
&& sum.getResponse().equals(val)) {
// since it's still valid, remove it from toDeleteList
toDeleteList.remove(sum);
// update the count. Since we still have the
// persistenceContext open, this will automatically be
// flushed to the datastore without an explicit call to
// save
sum.setCount(valueEntry.getValue());
found = true;
}
}
if (!found) {
// need to create it
SurveyQuestionSummary s = new SurveyQuestionSummary();
s.setCount(valueEntry.getValue());
s.setQuestionId(summaryEntry.getKey());
s.setResponse(val);
toCreateList.add(s);
}
}
// delete the unseen entities
if (toDeleteList.size() > 0) {
summaryDao.delete(toDeleteList);
}
// save the new items
if (toCreateList.size() > 0) {
summaryDao.save(toCreateList);
}
// flush the datastore operation
summaryDao.flushBatch();
}
}
/**
* loads all the summarizable QuestionAnswerStore instances from the data
* store and accrues counts by value occurrence in a map keyed on the
* questionId
*
* @param sinceDate
* @return
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private Map<String, Map<String, Long>> summarizeQuestionAnswerStore(
Long surveyId, Date sinceDate) {
final QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
final QuestionDao questionDao = new QuestionDao();
final List<Question> qList = questionDao.listQuestionByType(surveyId,
Question.Type.OPTION);
Cache cache = null;
Map props = new HashMap();
props.put(GCacheFactory.EXPIRATION_DELTA, 60 * 60 * 2); // 2h
props.put(MemcacheService.SetPolicy.SET_ALWAYS, true);
try {
CacheFactory cacheFactory = CacheManager.getInstance()
.getCacheFactory();
cache = cacheFactory.createCache(props);
} catch (Exception e) {
log.log(Level.SEVERE,
"Couldn't initialize cache: " + e.getMessage(), e);
}
String cursor = null;
final Map<String, Map<String, Long>> summaryMap = new HashMap<String, Map<String, Long>>();
for (Question q : qList) {
List<QuestionAnswerStore> qasList = qasDao.listByQuestion(q.getKey().getId(), cursor, QAS_PAGE_SIZE);
if (qasList == null || qasList.size() == 0) {
continue; // skip
}
do {
cursor = QuestionAnswerStoreDao.getCursor(qasList);
for(QuestionAnswerStore qas : qasList) {
if (cache != null) {
Map<Long, String> answer = new HashMap<Long, String>();
answer.put(qas.getSurveyInstanceId(), qas.getQuestionID());
if (cache.containsKey(answer)) {
log.log(Level.INFO, "Found duplicated QAS {surveyInstanceId: " + qas.getSurveyInstanceId() +" , questionID: " + qas.getQuestionID() +"}");
continue;
}
cache.put(answer, true);
}
String val = qas.getValue();
Map<String, Long> countMap = summaryMap.get(qas
.getQuestionID());
if (countMap == null) {
countMap = new HashMap<String, Long>();
summaryMap.put(qas.getQuestionID(), countMap);
}
// split up multiple answers
String[] answers;
if (val != null && val.contains("|")) {
answers = val.split("\\|");
} else {
answers = new String[] { val };
}
// perform count
for (int i = 0; i < answers.length; i++) {
Long count = countMap.get(answers[i]);
if (count == null) {
count = 1L;
} else {
count = count + 1;
}
countMap.put(answers[i], count);
}
}
qasList = qasDao.listByQuestion(q.getKey().getId(), cursor, QAS_PAGE_SIZE);
} while (qasList != null && qasList.size() > 0);
cursor = null;
}
return summaryMap;
}
/**
* iterates over all AccessPoints in a country and applies a static set of
* rules to determine the proper value of the WFPProjectFlag
*
* @param country
* @param cursor
*/
private void updateAccessPointProjectFlag(String country, String cursor) {
AccessPointDao apDao = new AccessPointDao();
Integer pageSize = 200;
List<AccessPoint> apList = apDao.listAccessPointByLocation(country,
null, null, null, cursor, pageSize);
if (apList != null) {
for (AccessPoint ap : apList) {
if ("PE".equalsIgnoreCase(ap.getCountryCode())) {
ap.setWaterForPeopleProjectFlag(false);
} else if ("RW".equalsIgnoreCase(ap.getCountryCode())) {
ap.setWaterForPeopleProjectFlag(false);
} else if ("MW".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getCommunityName().trim()
.equalsIgnoreCase("Kachere/Makhetha/Nkolokoti")) {
ap.setCommunityName("Kachere/Makhetha/Nkolokoti");
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(true);
}
} else if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(false);
}
} else if ("HN".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getCommunityCode().startsWith("IL")) {
ap.setWaterForPeopleProjectFlag(false);
} else {
ap.setWaterForPeopleProjectFlag(true);
}
} else if ("IN".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(true);
}
} else if ("GT".equalsIgnoreCase(ap.getCountryCode())) {
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterAvailableDayVisitFlag(true);
}
} else {
// handles BO, DO, SV
if (ap.getWaterForPeopleProjectFlag() == null) {
ap.setWaterForPeopleProjectFlag(false);
}
}
}
if (apList.size() == pageSize) {
// check for more
sendProjectUpdateTask(country, AccessPointDao.getCursor(apList));
}
}
}
/**
* Sends a message to a task queue to start or continue the processing of
* the AP Project Flag
*
* @param country
* @param cursor
*/
public static void sendProjectUpdateTask(String country, String cursor) {
TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.PROJECT_FLAG_UPDATE_ACTION)
.param(DataProcessorRequest.COUNTRY_PARAM, country)
.param(DataProcessorRequest.CURSOR_PARAM,
cursor != null ? cursor : "");
Queue queue = QueueFactory.getDefaultQueue();
queue.add(options);
}
/**
* fixes wrong Types in questionAnswerStore objects. When cleaned data is
* uploaded using an excel file, the type of the answer is set according to
* the type of the question, while the device sets the type according to a
* different convention. The action handles QAS_PAGE_SIZE items in one call, and
* invokes new tasks as necessary if there are more items.
*
* @author M.T. Westra
*/
public static void fixOptions2Values() {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
List<QuestionAnswerStore> qasList = siDao.listQAOptions(null,
QAS_PAGE_SIZE, "OPTION", "FREE_TEXT", "NUMBER", "SCAN", "PHOTO");
List<QuestionAnswerStore> qasChangedList = new ArrayList<QuestionAnswerStore>();
log.log(Level.INFO, "Running fixOptions2Values");
if (qasList != null) {
for (QuestionAnswerStore qas : qasList) {
if (Question.Type.OPTION.toString().equals(qas.getType())
|| Question.Type.NUMBER.toString()
.equals(qas.getType())
|| Question.Type.FREE_TEXT.toString().equals(
qas.getType())
|| Question.Type.SCAN.toString().equals(qas.getType())) {
qas.setType("VALUE");
qasChangedList.add(qas);
} else if (Question.Type.PHOTO.toString().equals(qas.getType())) {
qas.setType("IMAGE");
qasChangedList.add(qas);
}
}
qasDao.save(qasChangedList);
// if there are more, invoke another task
if (qasList.size() == QAS_PAGE_SIZE) {
log.log(Level.INFO, "invoking another fixOptions task");
Queue queue = QueueFactory.getDefaultQueue();
TaskOptions options = TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.FIX_OPTIONS2VALUES_ACTION);
queue.add(options);
}
}
}
public static void surveyInstanceSummarizer(Long surveyInstanceId,
Long qasId, Integer delta) {
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
boolean success = false;
if (surveyInstanceId != null) {
SurveyInstance si = siDao.getByKey(surveyInstanceId);
if (si != null && qasId != null) {
QuestionAnswerStore qas = qasDao.getByKey(qasId);
if (qas != null) {
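                    // the answer is expected to hold a GEO response; resolve it to a GeoPlace and bump the per-region daily summary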
GeoCoordinates geoC = null;
if (qas.getValue() != null
&& qas.getValue().trim().length() > 0) {
geoC = GeoCoordinates.extractGeoCoordinate(qas
.getValue());
}
if (geoC != null) {
GeoLocationService gisService = new GeoLocationServiceGeonamesImpl();
GeoPlace gp = gisService.findDetailedGeoPlace(geoC
.getLatitude().toString(), geoC.getLongitude()
.toString());
if (gp != null) {
SurveyInstanceSummaryDao.incrementCount(
gp.getSub1(), gp.getCountryCode(),
qas.getCollectionDate(), delta.intValue());
success = true;
}
}
}
}
}
if (!success) {
log.log(Level.SEVERE,
"Couldnt find geoplace for instance. Instance id: "
+ surveyInstanceId);
}
}
}
| Issue #396 - fix logic locale type reset | GAE/src/org/waterforpeople/mapping/app/web/DataProcessorRestServlet.java | Issue #396 - fix logic locale type reset | <ide><path>AE/src/org/waterforpeople/mapping/app/web/DataProcessorRestServlet.java
<ide> for (SurveyInstance si : siList) {
<ide> if (si.getSurveyedLocaleId() != null) {
<ide> SurveyedLocale sl = slDao.getByKey(si.getSurveyedLocaleId());
<del> if (sl != null && !sl.getLocaleType().equals(localeType)) {
<del> sl.setLocaleType(localeType);
<del> slList.add(sl);
<add> if (sl != null){
<add> // if the locale type is not set or if it is not equal to the survey setting,
<add> // reset the local type
<add> if (sl.getLocaleType() == null || !sl.getLocaleType().equals(localeType)) {
<add> sl.setLocaleType(localeType);
<add> slList.add(sl);
<add> }
<ide> }
<ide> }
<ide> } |
|
Java | apache-2.0 | 1bc379fe4c0ac184284b29cb039e68ff5da2efd0 | 0 | vtkhir/kaa,sashadidukh/kaa,aglne/kaa,aglne/kaa,aglne/kaa,vtkhir/kaa,vtkhir/kaa,vtkhir/kaa,sashadidukh/kaa,vtkhir/kaa,sashadidukh/kaa,sashadidukh/kaa,aglne/kaa,vtkhir/kaa,aglne/kaa,sashadidukh/kaa,sashadidukh/kaa,sashadidukh/kaa,vtkhir/kaa,aglne/kaa,aglne/kaa,sashadidukh/kaa,vtkhir/kaa | /*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.control.service.loadmgmt.dynamicmgmt;
import org.kaaproject.kaa.server.common.zk.gen.LoadInfo;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* The Class OperationsServerLoadHistory
*
* @author Andrey Panasenko
*/
public class OperationsServerLoadHistory {
private final List<OperationsServerLoad> history;
private long maxHistoryTimeLive = 600 * 1000;
public OperationsServerLoadHistory(long maxHistoryTimeLiv) {
setMaxHistoryTimeLive(maxHistoryTimeLiv);
history = new CopyOnWriteArrayList<OperationsServerLoad>();
}
/**
* Adds the Operations server load to the history
*
* @param load the load
*/
public void addOpsServerLoad(LoadInfo load) {
removeOldHistory();
history.add(new OperationsServerLoad(load));
}
/**
* Gets the history.
*
* @return the history
*/
public final List<OperationsServerLoad> getHistory() {
return history;
}
/**
* Removes the old history.
*/
private void removeOldHistory() {
long current = System.currentTimeMillis();
List<OperationsServerLoad> toDelete = new LinkedList<OperationsServerLoad>();
for (OperationsServerLoad snap : history) {
if ((current - snap.getTime()) > maxHistoryTimeLive) {
//Remove record.
toDelete.add(snap);
}
}
if (!toDelete.isEmpty()) {
for (OperationsServerLoad snap : toDelete) {
history.remove(snap);
}
toDelete.clear();
}
}
/**
* Gets the max history time live.
*
* @return the maxHistoryTimeLive
*/
public long getMaxHistoryTimeLive() {
return maxHistoryTimeLive;
}
/**
* The Class OperationsServerLoad.
*/
public class OperationsServerLoad {
private final long time;
private LoadInfo loadInfo;
protected OperationsServerLoad(LoadInfo load) {
time = System.currentTimeMillis();
this.loadInfo = load;
}
/**
* Gets the time.
*
* @return the time
*/
public long getTime() {
return time;
}
/**
* Gets the registered users count.
*
* @return the registeredUsersCount
*/
public LoadInfo getLoadInfo() {
return loadInfo;
}
/**
* Sets the registered users count.
*
* @param loadInfo the load info to set
*/
public void setLoadInfo(LoadInfo loadInfo) {
this.loadInfo = loadInfo;
}
    }

    /**
* Sets the max history time live.
*
* @param maxHistoryTimeLive the maxHistoryTimeLive to set
*/
public void setMaxHistoryTimeLive(long maxHistoryTimeLive) {
this.maxHistoryTimeLive = maxHistoryTimeLive;
}
}
| server/node/src/main/java/org/kaaproject/kaa/server/control/service/loadmgmt/dynamicmgmt/OperationsServerLoadHistory.java | /*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.control.service.loadmgmt.dynamicmgmt;
import org.kaaproject.kaa.server.common.zk.gen.LoadInfo;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* The Class OperationsServerLoadHistory
*
* @author Andrey Panasenko
*/
public class OperationsServerLoadHistory {
private final List<OperationsServerLoad> history;
private long maxHistoryTimeLive = 600 * 1000;
/**
* The Class OperationsServerLoad.
*/
public class OperationsServerLoad {
private final long time;
private LoadInfo loadInfo;
protected OperationsServerLoad(LoadInfo load) {
time = System.currentTimeMillis();
this.loadInfo = load;
}
/**
* Gets the time.
*
* @return the time
*/
public long getTime() {
return time;
}
/**
* Gets the registered users count.
*
* @return the registeredUsersCount
*/
public LoadInfo getLoadInfo() {
return loadInfo;
}
/**
* Sets the registered users count.
*
* @param loadInfo the load info to set
*/
public void setLoadInfo(LoadInfo loadInfo) {
this.loadInfo = loadInfo;
}
}
public OperationsServerLoadHistory(long maxHistoryTimeLiv) {
setMaxHistoryTimeLive(maxHistoryTimeLiv);
history = new CopyOnWriteArrayList<OperationsServerLoad>();
}
/**
* Adds the Operations server load to the history
*
* @param load the load
*/
public void addOpsServerLoad(LoadInfo load) {
removeOldHistory();
history.add(new OperationsServerLoad(load));
}
/**
* Gets the history.
*
* @return the history
*/
public final List<OperationsServerLoad> getHistory() {
return history;
}
/**
* Removes the old history.
*/
private void removeOldHistory() {
long current = System.currentTimeMillis();
List<OperationsServerLoad> toDelete = new LinkedList<OperationsServerLoad>();
for (OperationsServerLoad snap : history) {
if ((current - snap.getTime()) > maxHistoryTimeLive) {
//Remove record.
toDelete.add(snap);
}
}
if (!toDelete.isEmpty()) {
for (OperationsServerLoad snap : toDelete) {
history.remove(snap);
}
toDelete.clear();
}
}
/**
* Gets the max history time live.
*
* @return the maxHistoryTimeLive
*/
public long getMaxHistoryTimeLive() {
return maxHistoryTimeLive;
}
/**
* The Class OperationsServerLoad.
*/
public class OperationsServerLoad {
private final long time;
private LoadInfo loadInfo;
protected OperationsServerLoad(LoadInfo load) {
time = System.currentTimeMillis();
this.loadInfo = load;
}
/**
* Gets the time.
*
* @return the time
*/
public long getTime() {
return time;
}
/**
* Gets the registered users count.
*
* @return the registeredUsersCount
*/
public LoadInfo getLoadInfo() {
return loadInfo;
}
/**
* Sets the registered users count.
*
* @param loadInfo the load info to set
*/
public void setLoadInfo(LoadInfo loadInfo) {
this.loadInfo = loadInfo;
}
    }

    /**
* Sets the max history time live.
*
* @param maxHistoryTimeLive the maxHistoryTimeLive to set
*/
public void setMaxHistoryTimeLive(long maxHistoryTimeLive) {
this.maxHistoryTimeLive = maxHistoryTimeLive;
}
}
| Removed duplicates.
| server/node/src/main/java/org/kaaproject/kaa/server/control/service/loadmgmt/dynamicmgmt/OperationsServerLoadHistory.java | Removed duplicates. | <ide><path>erver/node/src/main/java/org/kaaproject/kaa/server/control/service/loadmgmt/dynamicmgmt/OperationsServerLoadHistory.java
<ide> public class OperationsServerLoadHistory {
<ide> private final List<OperationsServerLoad> history;
<ide> private long maxHistoryTimeLive = 600 * 1000;
<del>
<del> /**
<del> * The Class OperationsServerLoad.
<del> */
<del> public class OperationsServerLoad {
<del> private final long time;
<del> private LoadInfo loadInfo;
<del>
<del> protected OperationsServerLoad(LoadInfo load) {
<del> time = System.currentTimeMillis();
<del> this.loadInfo = load;
<del> }
<del>
<del> /**
<del> * Gets the time.
<del> *
<del> * @return the time
<del> */
<del> public long getTime() {
<del> return time;
<del> }
<del>
<del> /**
<del> * Gets the registered users count.
<del> *
<del> * @return the registeredUsersCount
<del> */
<del> public LoadInfo getLoadInfo() {
<del> return loadInfo;
<del> }
<del>
<del> /**
<del> * Sets the registered users count.
<del> *
<del> * @param loadInfo the load info to set
<del> */
<del> public void setLoadInfo(LoadInfo loadInfo) {
<del> this.loadInfo = loadInfo;
<del> }
<del> }
<ide>
<ide> public OperationsServerLoadHistory(long maxHistoryTimeLiv) {
<ide> setMaxHistoryTimeLive(maxHistoryTimeLiv); |
|
Java | apache-2.0 | e467f34cc1add0dba8f669aa49f1905245266b7c | 0 | chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.QueueBrowser;
import javax.jms.Session;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QueueZeroPrefetchLazyDispatchPriorityTest {
private static final Logger LOG = LoggerFactory.getLogger(QueueZeroPrefetchLazyDispatchPriorityTest.class);
private final byte[] PAYLOAD = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private final int ITERATIONS = 6;
private BrokerService broker;
@Before
public void setUp() throws Exception {
broker = createBroker();
broker.start();
broker.waitUntilStarted();
}
@After
public void tearDown() throws Exception {
if (broker != null) {
broker.stop();
}
}
@Test(timeout=120000)
public void testPriorityMessages() throws Exception {
for (int i = 0; i < ITERATIONS; i++) {
// send 4 message priority MEDIUM
produceMessages(4, 4, "TestQ");
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
LOG.info("On iteration {}", i);
Thread.sleep(500);
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list " + consumeList.size());
// compare lists
assertEquals("message 1 should be priority high", 5, consumeList.get(0).getJMSPriority());
assertEquals("message 2 should be priority medium", 4, consumeList.get(1).getJMSPriority());
assertEquals("message 3 should be priority medium", 4, consumeList.get(2).getJMSPriority());
assertEquals("message 4 should be priority medium", 4, consumeList.get(3).getJMSPriority());
assertEquals("message 5 should be priority medium", 4, consumeList.get(4).getJMSPriority());
}
}
@Test(timeout=120000)
public void testPriorityMessagesMoreThanPageSize() throws Exception {
final int numToSend = 450;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
// ensure we get expiry processing
Thread.sleep(700);
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Thread.sleep(500);
LOG.info("On iteration {}", i);
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
// compare lists
assertFalse("Consumed list should not be empty", consumeList.isEmpty());
assertEquals("message 1 should be priority high", 5, consumeList.get(0).getJMSPriority());
for (int j = 1; j < (numToSend - 1); j++) {
assertEquals("message " + j + " should be priority medium", 4, consumeList.get(j).getJMSPriority());
}
}
}
@Test(timeout=120000)
public void testLongLivedPriorityConsumer() throws Exception {
final int numToSend = 150;
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue("TestQ"));
connection.start();
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Message message = consumer.receive(4000);
assertEquals("message should be priority high", 5, message.getJMSPriority());
}
} finally {
connection.close();
}
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
for (Message message : consumeList) {
assertEquals("should be priority medium", 4, message.getJMSPriority());
}
}
@Test(timeout=120000)
public void testPriorityMessagesWithJmsBrowser() throws Exception {
final int numToSend = 250;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
ArrayList<Message> browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Thread.sleep(500);
LOG.info("On iteration {}", i);
Message message = consumeOneMessage("TestQ");
assertNotNull(message);
assertEquals(5, message.getJMSPriority());
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
// compare lists
// assertEquals("Iteration: " + i
// +", message 1 should be priority high", 5,
// consumeList.get(0).getJMSPriority());
for (int j = 1; j < (numToSend - 1); j++) {
assertEquals("Iteration: " + i + ", message " + j + " should be priority medium", 4, consumeList.get(j).getJMSPriority());
}
}
}
@Test(timeout=120000)
public void testJmsBrowserGetsPagedIn() throws Exception {
final int numToSend = 10;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend, 4, "TestQ");
ArrayList<Message> browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
assertEquals(0, browsed.size());
Message message = consumeOneMessage("TestQ", Session.CLIENT_ACKNOWLEDGE);
assertNotNull(message);
browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
assertEquals("see only the paged in for pull", 1, browsed.size());
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list " + consumeList.size());
assertEquals(numToSend, consumeList.size());
}
}
private void produceMessages(int numberOfMessages, int priority, String queueName) throws Exception {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
connectionFactory.setConnectionIDPrefix("pri-" + priority);
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageProducer producer = session.createProducer(new ActiveMQQueue(queueName));
connection.start();
for (int i = 0; i < numberOfMessages; i++) {
BytesMessage m = session.createBytesMessage();
m.writeBytes(PAYLOAD);
m.setJMSPriority(priority);
producer.send(m, Message.DEFAULT_DELIVERY_MODE, m.getJMSPriority(), Message.DEFAULT_TIME_TO_LIVE);
}
} finally {
if (connection != null) {
connection.close();
}
}
}
private ArrayList<Message> consumeMessages(String queueName) throws Exception {
ArrayList<Message> returnedMessages = new ArrayList<Message>();
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(queueName));
connection.start();
boolean finished = false;
while (!finished) {
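                // wait longer for the first message; later receives only need a short timeout to detect that the queue has drained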
Message message = consumer.receive(returnedMessages.isEmpty() ? 5000 : 1000);
if (message == null) {
finished = true;
}
if (message != null) {
returnedMessages.add(message);
}
}
consumer.close();
return returnedMessages;
} finally {
if (connection != null) {
connection.close();
}
}
}
private Message consumeOneMessage(String queueName) throws Exception {
return consumeOneMessage(queueName, Session.AUTO_ACKNOWLEDGE);
}
private Message consumeOneMessage(String queueName, int ackMode) throws Exception {
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, ackMode);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(queueName));
connection.start();
return consumer.receive(5000);
} finally {
if (connection != null) {
connection.close();
}
}
}
private ArrayList<Message> browseMessages(String queueName) throws Exception {
ArrayList<Message> returnedMessages = new ArrayList<Message>();
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
QueueBrowser consumer = session.createBrowser(new ActiveMQQueue(queueName));
connection.start();
Enumeration<?> enumeration = consumer.getEnumeration();
while (enumeration.hasMoreElements()) {
Message message = (Message) enumeration.nextElement();
returnedMessages.add(message);
}
return returnedMessages;
} finally {
if (connection != null) {
connection.close();
}
}
}
private BrokerService createBroker() throws Exception {
BrokerService broker = new BrokerService();
broker.setDeleteAllMessagesOnStartup(true);
// add the policy entries
PolicyMap policyMap = new PolicyMap();
List<PolicyEntry> entries = new ArrayList<PolicyEntry>();
PolicyEntry pe = new PolicyEntry();
pe.setPrioritizedMessages(true);
pe.setExpireMessagesPeriod(500);
pe.setMaxPageSize(100);
pe.setMaxExpirePageSize(0);
pe.setMaxBrowsePageSize(0);
pe.setQueuePrefetch(0);
pe.setLazyDispatch(true);
pe.setOptimizedDispatch(true);
pe.setUseCache(false);
pe.setQueue(">");
entries.add(pe);
policyMap.setPolicyEntries(entries);
broker.setDestinationPolicy(policyMap);
broker.addConnector("tcp://0.0.0.0:0");
return broker;
}
} | activemq-unit-tests/src/test/java/org/apache/activemq/usecases/QueueZeroPrefetchLazyDispatchPriorityTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.QueueBrowser;
import javax.jms.Session;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QueueZeroPrefetchLazyDispatchPriorityTest {
private static final Logger LOG = LoggerFactory.getLogger(QueueZeroPrefetchLazyDispatchPriorityTest.class);
private final byte[] PAYLOAD = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
private final int ITERATIONS = 6;
private BrokerService broker;
@Before
public void setUp() throws Exception {
broker = createBroker();
broker.start();
broker.waitUntilStarted();
}
@After
public void tearDown() throws Exception {
if (broker != null) {
broker.stop();
}
}
@Test(timeout=120000)
public void testPriorityMessages() throws Exception {
for (int i = 0; i < ITERATIONS; i++) {
// send 4 message priority MEDIUM
produceMessages(4, 4, "TestQ");
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
LOG.info("On iteration {}", i);
Thread.sleep(500);
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list " + consumeList.size());
// compare lists
assertEquals("message 1 should be priority high", 5, consumeList.get(0).getJMSPriority());
assertEquals("message 2 should be priority medium", 4, consumeList.get(1).getJMSPriority());
assertEquals("message 3 should be priority medium", 4, consumeList.get(2).getJMSPriority());
assertEquals("message 4 should be priority medium", 4, consumeList.get(3).getJMSPriority());
assertEquals("message 5 should be priority medium", 4, consumeList.get(4).getJMSPriority());
}
}
@Test(timeout=120000)
public void testPriorityMessagesMoreThanPageSize() throws Exception {
final int numToSend = 450;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
// ensure we get expiry processing
Thread.sleep(700);
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Thread.sleep(500);
LOG.info("On iteration {}", i);
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
// compare lists
assertFalse("Consumed list should not be empty", consumeList.isEmpty());
assertEquals("message 1 should be priority high", 5, consumeList.get(0).getJMSPriority());
for (int j = 1; j < (numToSend - 1); j++) {
assertEquals("message " + j + " should be priority medium", 4, consumeList.get(j).getJMSPriority());
}
}
}
@Test(timeout=120000)
public void testLongLivedPriorityConsumer() throws Exception {
final int numToSend = 150;
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue("TestQ"));
connection.start();
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Message message = consumer.receive(4000);
assertEquals("message should be priority high", 5, message.getJMSPriority());
}
} finally {
connection.close();
}
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
for (Message message : consumeList) {
assertEquals("should be priority medium", 4, message.getJMSPriority());
}
}
@Test(timeout=120000)
public void testPriorityMessagesWithJmsBrowser() throws Exception {
final int numToSend = 250;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend - 1, 4, "TestQ");
ArrayList<Message> browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
// send 1 message priority HIGH
produceMessages(1, 5, "TestQ");
Thread.sleep(500);
LOG.info("On iteration {}", i);
Message message = consumeOneMessage("TestQ");
assertNotNull(message);
assertEquals(5, message.getJMSPriority());
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list {}", consumeList.size());
// compare lists
// assertEquals("Iteration: " + i
// +", message 1 should be priority high", 5,
// consumeList.get(0).getJMSPriority());
for (int j = 1; j < (numToSend - 1); j++) {
assertEquals("Iteration: " + i + ", message " + j + " should be priority medium", 4, consumeList.get(j).getJMSPriority());
}
}
}
@Test(timeout=120000)
public void testJmsBrowserGetsPagedIn() throws Exception {
final int numToSend = 10;
for (int i = 0; i < ITERATIONS; i++) {
produceMessages(numToSend, 4, "TestQ");
ArrayList<Message> browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
assertEquals(0, browsed.size());
Message message = consumeOneMessage("TestQ", Session.CLIENT_ACKNOWLEDGE);
assertNotNull(message);
browsed = browseMessages("TestQ");
LOG.info("Browsed: {}", browsed.size());
assertEquals("see only the paged in for pull", 1, browsed.size());
// consume messages
ArrayList<Message> consumeList = consumeMessages("TestQ");
LOG.info("Consumed list " + consumeList.size());
assertEquals(numToSend, consumeList.size());
}
}
private void produceMessages(int numberOfMessages, int priority, String queueName) throws Exception {
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
connectionFactory.setConnectionIDPrefix("pri-" + priority);
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageProducer producer = session.createProducer(new ActiveMQQueue(queueName));
connection.start();
for (int i = 0; i < numberOfMessages; i++) {
BytesMessage m = session.createBytesMessage();
m.writeBytes(PAYLOAD);
m.setJMSPriority(priority);
producer.send(m, Message.DEFAULT_DELIVERY_MODE, m.getJMSPriority(), Message.DEFAULT_TIME_TO_LIVE);
}
} finally {
if (connection != null) {
connection.close();
}
}
}
private ArrayList<Message> consumeMessages(String queueName) throws Exception {
ArrayList<Message> returnedMessages = new ArrayList<Message>();
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(queueName));
connection.start();
boolean finished = false;
while (!finished) {
Message message = consumer.receive(1000);
if (message == null) {
finished = true;
}
if (message != null) {
returnedMessages.add(message);
}
}
consumer.close();
return returnedMessages;
} finally {
if (connection != null) {
connection.close();
}
}
}
private Message consumeOneMessage(String queueName) throws Exception {
return consumeOneMessage(queueName, Session.AUTO_ACKNOWLEDGE);
}
private Message consumeOneMessage(String queueName, int ackMode) throws Exception {
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, ackMode);
MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(queueName));
connection.start();
return consumer.receive(2000);
} finally {
if (connection != null) {
connection.close();
}
}
}
private ArrayList<Message> browseMessages(String queueName) throws Exception {
ArrayList<Message> returnedMessages = new ArrayList<Message>();
ConnectionFactory connectionFactory = new ActiveMQConnectionFactory(broker.getTransportConnectorByScheme("tcp").getPublishableConnectString());
Connection connection = connectionFactory.createConnection();
try {
Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
QueueBrowser consumer = session.createBrowser(new ActiveMQQueue(queueName));
connection.start();
Enumeration<?> enumeration = consumer.getEnumeration();
while (enumeration.hasMoreElements()) {
Message message = (Message) enumeration.nextElement();
returnedMessages.add(message);
}
return returnedMessages;
} finally {
if (connection != null) {
connection.close();
}
}
}
private BrokerService createBroker() throws Exception {
BrokerService broker = new BrokerService();
broker.setDeleteAllMessagesOnStartup(true);
// add the policy entries
PolicyMap policyMap = new PolicyMap();
List<PolicyEntry> entries = new ArrayList<PolicyEntry>();
PolicyEntry pe = new PolicyEntry();
pe.setPrioritizedMessages(true);
pe.setExpireMessagesPeriod(500);
pe.setMaxPageSize(100);
pe.setMaxExpirePageSize(0);
pe.setMaxBrowsePageSize(0);
pe.setQueuePrefetch(0);
pe.setLazyDispatch(true);
pe.setOptimizedDispatch(true);
pe.setUseCache(false);
pe.setQueue(">");
entries.add(pe);
policyMap.setPolicyEntries(entries);
broker.setDestinationPolicy(policyMap);
broker.addConnector("tcp://0.0.0.0:0");
return broker;
}
} | longer receive timeout on first message, intermittent ci failure
| activemq-unit-tests/src/test/java/org/apache/activemq/usecases/QueueZeroPrefetchLazyDispatchPriorityTest.java | longer receive timeout on first message, intermittent ci failure | <ide><path>ctivemq-unit-tests/src/test/java/org/apache/activemq/usecases/QueueZeroPrefetchLazyDispatchPriorityTest.java
<ide> boolean finished = false;
<ide>
<ide> while (!finished) {
<del> Message message = consumer.receive(1000);
<add> Message message = consumer.receive(returnedMessages.isEmpty() ? 5000 : 1000);
<ide> if (message == null) {
<ide> finished = true;
<ide> }
<ide> MessageConsumer consumer = session.createConsumer(new ActiveMQQueue(queueName));
<ide> connection.start();
<ide>
<del> return consumer.receive(2000);
<add> return consumer.receive(5000);
<ide> } finally {
<ide> if (connection != null) {
<ide> connection.close(); |
|
Java | apache-2.0 | error: pathspec 'src/algorithm/other/DFS_BFS.java' did not match any file(s) known to git
| f58792b66a0616d5ee321bbbc6e439b58655107a | 1 | pingcai/LeetCode,pingcai/Algorithm | package algorithm.other;
import java.util.LinkedList;
import java.util.Stack;
/**
* date:2017/9/7 18:52
* author:黄平财
* mail:[email protected]
*/
public class DFS_BFS {
/**
* 9
* / \
* 8 10
* /\ \
* 6 5 12
* \
* 16
     * Depth-first: 9 8 6 5 16 10 12
     * Breadth-first: 9 8 10 6 5 12 16
*
* @param args
*/
public static void main(String[] args) {
Node root = new Node(9);
root.appendLeft(new Node(8)).appendLeft(new Node(6));
root.appendRight(new Node(10)).appendRight(new Node(12));
root.left.appendRight(new Node(5)).appendRight(new Node(16));
bfsRecur(root);
}
private static void bfsRecur(Node root) {
/**
         * No purely node-based recursive BFS comes to mind for now:
         * breadth-first is a level-order traversal, so recursing on single nodes does not seem to work.
         * Recursing on the queue is possible, but then it is still a queue-based implementation.
*/
if (null != root) {
LinkedList<Node> nodes = new LinkedList<>();
nodes.offer(root);
r(nodes);
}
}
private static void r(LinkedList<Node> nodes) {
if (!nodes.isEmpty()) {
Node cur = nodes.pop();
visit(cur);
if (null != cur.left) {
nodes.offer(cur.left);
}
if (cur.right != null) {
nodes.offer(cur.right);
}
r(nodes);
}
}
/**
     * Breadth-first traversal
*
* @param root
*/
private static void bfs(Node root) {
if (root != null) {
LinkedList<Node> list = new LinkedList<>();
list.push(root);
Node cur = null;
while (!list.isEmpty()) {
cur = list.pop();
visit(cur);
if (null != cur.left) {
list.offer(cur.left);
}
if (null != cur.right) {
list.offer(cur.right);
}
}
}
}
/**
     * Depth-first traversal (non-recursive)
*/
private static void dfs(Node root) {
if (root != null) {
Stack<Node> stack = new Stack<>();
stack.push(root);
            Node cur = null; // current node
while (!stack.empty()) {
cur = stack.pop();
visit(cur);
if (cur.right != null) {
stack.push(cur.right);
}
if (cur.left != null) {
stack.push(cur.left);
}
}
}
}
/**
     * Depth-first traversal (recursive)
*/
private static void dfsRecur(Node root) {
if (root != null) {
visit(root);
dfsRecur(root.left);
dfsRecur(root.right);
}
}
private static void visit(Node root) {
System.out.print(root.val + " ");
}
private static class Node {
int val;
Node left;
Node right;
public Node(int i) {
val = i;
}
public Node appendLeft(Node node) {
this.left = node;
return node;
}
public Node appendRight(Node node) {
this.right = node;
return node;
}
}
}
| src/algorithm/other/DFS_BFS.java | bfs&dfs
| src/algorithm/other/DFS_BFS.java | bfs&dfs | <ide><path>rc/algorithm/other/DFS_BFS.java
<add>package algorithm.other;
<add>
<add>import java.util.LinkedList;
<add>import java.util.Stack;
<add>
<add>/**
<add> * date:2017/9/7 18:52
<add> * author:黄平财
<add> * mail:[email protected]
<add> */
<add>public class DFS_BFS {
<add> /**
<add> * 9
<add> * / \
<add> * 8 10
<add> * /\ \
<add> * 6 5 12
<add> * \
<add> * 16
<add> * Depth-first: 9 8 6 5 16 10 12
<add> * Breadth-first: 9 8 10 6 5 12 16
<add> *
<add> * @param args
<add> */
<add> public static void main(String[] args) {
<add> Node root = new Node(9);
<add> root.appendLeft(new Node(8)).appendLeft(new Node(6));
<add> root.appendRight(new Node(10)).appendRight(new Node(12));
<add> root.left.appendRight(new Node(5)).appendRight(new Node(16));
<add>
<add> bfsRecur(root);
<add>
<add> }
<add>
<add> private static void bfsRecur(Node root) {
<add> /**
<add>         * No purely node-based recursive BFS comes to mind for now:
<add>         * breadth-first is a level-order traversal, so recursing on single nodes does not seem to work.
<add>         * Recursing on the queue is possible, but then it is still a queue-based implementation.
<add> */
<add> if (null != root) {
<add> LinkedList<Node> nodes = new LinkedList<>();
<add> nodes.offer(root);
<add> r(nodes);
<add> }
<add> }
<add>
<add> private static void r(LinkedList<Node> nodes) {
<add> if (!nodes.isEmpty()) {
<add> Node cur = nodes.pop();
<add> visit(cur);
<add> if (null != cur.left) {
<add> nodes.offer(cur.left);
<add> }
<add> if (cur.right != null) {
<add> nodes.offer(cur.right);
<add> }
<add> r(nodes);
<add> }
<add> }
<add>
<add> /**
<add>     * Breadth-first traversal
<add> *
<add> * @param root
<add> */
<add> private static void bfs(Node root) {
<add> if (root != null) {
<add> LinkedList<Node> list = new LinkedList<>();
<add> list.push(root);
<add> Node cur = null;
<add> while (!list.isEmpty()) {
<add> cur = list.pop();
<add> visit(cur);
<add> if (null != cur.left) {
<add> list.offer(cur.left);
<add> }
<add> if (null != cur.right) {
<add> list.offer(cur.right);
<add> }
<add> }
<add> }
<add> }
<add>
<add> /**
<add>     * Depth-first traversal (non-recursive)
<add> */
<add> private static void dfs(Node root) {
<add> if (root != null) {
<add> Stack<Node> stack = new Stack<>();
<add> stack.push(root);
<add>             Node cur = null; // current node
<add> while (!stack.empty()) {
<add> cur = stack.pop();
<add> visit(cur);
<add> if (cur.right != null) {
<add> stack.push(cur.right);
<add> }
<add> if (cur.left != null) {
<add> stack.push(cur.left);
<add> }
<add> }
<add> }
<add> }
<add>
<add> /**
<add>     * Depth-first traversal (recursive)
<add> */
<add> private static void dfsRecur(Node root) {
<add> if (root != null) {
<add> visit(root);
<add> dfsRecur(root.left);
<add> dfsRecur(root.right);
<add> }
<add> }
<add>
<add> private static void visit(Node root) {
<add> System.out.print(root.val + " ");
<add> }
<add>
<add>
<add> private static class Node {
<add> int val;
<add> Node left;
<add> Node right;
<add>
<add> public Node(int i) {
<add> val = i;
<add> }
<add>
<add> public Node appendLeft(Node node) {
<add> this.left = node;
<add> return node;
<add> }
<add>
<add> public Node appendRight(Node node) {
<add> this.right = node;
<add> return node;
<add> }
<add> }
<add>} |
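Editorial note on the DFS_BFS file above: the comment in bfsRecur records that a node-based recursive BFS seemed infeasible, so the recursion is done on the queue instead. For comparison, BFS can also be written as a recursion over whole levels with no explicit queue. The sketch below is illustrative only and is not part of the commit; the method name bfsLevels is invented here, it reuses the Node class and visit() helper defined in DFS_BFS, and it assumes java.util.List, java.util.ArrayList and java.util.Collections are imported.

    // Illustrative sketch (not from the commit): recursive BFS over levels instead of a queue.
    // Start it with bfsLevels(Collections.singletonList(root)).
    private static void bfsLevels(List<Node> level) {
        if (level == null || level.isEmpty()) {
            return;
        }
        List<Node> next = new ArrayList<>();
        for (Node n : level) {
            visit(n);                 // same visit() as in DFS_BFS
            if (n.left != null) {
                next.add(n.left);
            }
            if (n.right != null) {
                next.add(n.right);
            }
        }
        bfsLevels(next);              // recurse on the collected next level
    }

For the sample tree this prints 9 8 10 6 5 12 16, matching the breadth-first order noted in the class comment.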
|
Java | mit | f63e9fc184189deea0c39dd61fbb47b9dad424da | 0 | ZeusWPI/hydra,ZeusWPI/hydra,ZeusWPI/hydra | /**
*
* @author Tom Naessens [email protected] 3de Bachelor Informatica
* Universiteit Gent
*
*/
package be.ugent.zeus.hydra;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.text.util.Linkify;
import android.util.Log;
import android.view.View;
import android.view.ViewManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import be.ugent.zeus.hydra.data.Activity;
import be.ugent.zeus.hydra.util.facebook.event.data.AttendingStatus;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncComingGetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncComingSetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncFriendsGetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncInfoGetter;
import com.facebook.Session;
import com.facebook.SessionDefaultAudience;
import com.facebook.SessionState;
import com.facebook.UiLifecycleHelper;
import com.google.analytics.tracking.android.EasyTracker;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.List;
public class ActivityItemActivity extends AbstractSherlockActivity {
private int selected;
private UiLifecycleHelper uiHelper;
private Activity item;
private boolean fetched;
public class SessionStatusCallback implements Session.StatusCallback {
@Override
public void call(Session session, SessionState state, Exception exception) {
onSessionStateChange(session, state, exception);
}
}
private void onSessionStateChange(Session session, SessionState state, Exception exception) {
Log.i("FACEBOOK", "Onsessionstatechange: " + state.toString());
TextView guests = (TextView) findViewById(R.id.activity_item_guests);
ImageView image = (ImageView) findViewById(R.id.activity_item_image);
ImageView[] guestIcons = new ImageView[5];
guestIcons[0] = (ImageView) findViewById(R.id.activity_item_friends1);
guestIcons[1] = (ImageView) findViewById(R.id.activity_item_friends2);
guestIcons[2] = (ImageView) findViewById(R.id.activity_item_friends3);
guestIcons[3] = (ImageView) findViewById(R.id.activity_item_friends4);
guestIcons[4] = (ImageView) findViewById(R.id.activity_item_friends5);
Button button = (Button) findViewById(R.id.activity_item_button);
switch (state) {
case OPENED_TOKEN_UPDATED:
if (selected != -1) {
new AsyncComingSetter(this, item.facebook_id, button, AttendingStatus.values()[selected]).execute();
}
if (fetched) {
return;
}
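                // intentional fall-through to OPENED: the event info is fetched once per open session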
case OPENED:
if (!fetched) {
fetched = true;
new AsyncInfoGetter(item.facebook_id, guests, image).execute();
new AsyncComingGetter(this, item.facebook_id, button).execute();
new AsyncFriendsGetter(item.facebook_id, guests, guestIcons).execute();
}
return;
case CLOSED:
case CREATED:
new AsyncInfoGetter(item.facebook_id, guests, image).execute();
for (ImageView imageView : guestIcons) {
imageView.setVisibility(View.GONE);
}
button.setVisibility(View.GONE);
return;
}
}
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setTitle(R.string.details);
setContentView(R.layout.activity_item);
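        // selected tracks the RSVP dialog choice (-1 until the user picks one); fetched guards the one-time Facebook fetch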
selected = -1;
fetched = false;
/**
* Get the activity
*/
item = (Activity) getIntent().getSerializableExtra("item");
EasyTracker.getInstance().setContext(this);
EasyTracker.getTracker().sendView("Activity > " + item.title);
/**
* Facebook
*/
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
SessionStatusCallback statusCallback = new ActivityItemActivity.SessionStatusCallback();
uiHelper = new UiLifecycleHelper(this, statusCallback);
uiHelper.onCreate(icicle);
Session session = Session.getActiveSession();
if (session == null || !session.isOpened()) {
if (icicle != null) {
session = Session.restoreSession(this, null, statusCallback, icicle);
}
if (session == null) {
session = new Session(this);
}
if (session.getState().equals(SessionState.CREATED_TOKEN_LOADED)) {
session.openForRead(new Session.OpenRequest(this).setCallback(statusCallback));
} else if (session.getState().equals(SessionState.CREATED)) {
Session.openActiveSession(this, false, statusCallback);
}
onSessionStateChange(session, session.getState(), null);
} else if (session != null
&& (session.isOpened() || session.isClosed())) {
onSessionStateChange(session, session.getState(), null);
}
}
/**
* Image
*/
ImageView image = (ImageView) findViewById(R.id.activity_item_image);
image.setVisibility(View.INVISIBLE);
/**
* Title
*/
TextView title = (TextView) findViewById(R.id.activity_item_title);
title.setText(item.title);
/**
* Button
*/
final Button button = (Button) findViewById(R.id.activity_item_button);
if (item.facebook_id == null) {
button.setVisibility(View.GONE);
} else {
button.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
changeAttendingStatus(button, item.facebook_id);
}
});
}
/**
* Date
*/
TextView date = (TextView) findViewById(R.id.activity_item_date);
String datum =
new SimpleDateFormat("EEE dd MMMM", Hydra.LOCALE).format(item.startDate);
String start =
new SimpleDateFormat("HH:mm", Hydra.LOCALE).format(item.startDate);
String eind =
new SimpleDateFormat("HH:mm", Hydra.LOCALE).format(item.endDate);
date.setText(
String.format(getResources().getString(R.string.activity_item_time_location),
datum, start, eind));
/**
* Association
*/
TextView association = (TextView) findViewById(R.id.activity_item_association);
String poster = item.association.display_name;
if (item.association.full_name != null) {
poster += " (" + item.association.full_name + ")";
}
association.setText(
String.format(getResources().getString(R.string.activity_item_association_title), poster));
/**
* Location
*/
TextView location = (TextView) findViewById(R.id.activity_item_location);
View locationContainerSideBorder = (View) findViewById(R.id.activity_item_location_sideborder);
if (item.location == null || "".equals(item.location)) {
LinearLayout locationContainer = (LinearLayout) findViewById(R.id.activity_item_location_container);
View locationContainerBottomBorder = (View) findViewById(R.id.activity_item_location_bottomborder);
((ViewManager) locationContainer.getParent()).removeView(locationContainer);
((ViewManager) locationContainerBottomBorder.getParent()).removeView(locationContainerBottomBorder);
((ViewManager) locationContainerSideBorder.getParent()).removeView(locationContainerSideBorder);
} else {
location.setText(item.location);
ImageView directions = (ImageView) findViewById(R.id.activity_item_directions);
if (item.latitude != 0 && item.longitude != 0) {
directions.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
onDirectionsClick(item.location, item.latitude, item.longitude);
}
});
} else {
locationContainerSideBorder.setVisibility(View.INVISIBLE);
directions.setVisibility(View.INVISIBLE);
}
}
/**
* Facebook friends
*/
LinearLayout guestsContainer = (LinearLayout) findViewById(R.id.activity_item_guests_container);
View guestsBottomBorder = (View) findViewById(R.id.activity_item_guests_bottomborder);
ImageView external = (ImageView) findViewById(R.id.activity_item_facebook_external);
if (item.facebook_id == null || "".equals(item.facebook_id)) {
((ViewManager) guestsContainer.getParent()).removeView(guestsContainer);
((ViewManager) button.getParent()).removeView(button);
((ViewManager) guestsBottomBorder.getParent()).removeView(guestsBottomBorder);
} else {
final Context context = this;
external.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
Intent intent = getOpenFacebookIntent(context, item.facebook_id);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
});
}
/**
* Content
*/
View contentBottomBorder = (View) findViewById(R.id.activity_item_content_bottomborder);
TextView content = (TextView) findViewById(R.id.activity_item_content);
if (item.description == null || "".equals(item.description)) {
((ViewManager) content.getParent()).removeView(content);
((ViewManager) contentBottomBorder.getParent()).removeView(contentBottomBorder);
} else {
content.setText(Html.fromHtml(item.description.replace("\n", "<br>")));
content.setMovementMethod(LinkMovementMethod.getInstance());
Linkify.addLinks(content, Linkify.ALL);
}
/**
* More content
*/
LinearLayout moreContentContainer = (LinearLayout) findViewById(R.id.activity_item_more_content_container);
TextView moreContent = (TextView) findViewById(R.id.activity_item_more_content);
if (item.url == null || "".equals(item.url)) {
((ViewManager) moreContentContainer.getParent()).removeView(moreContentContainer);
} else {
moreContent.setText(item.url);
moreContentContainer.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (!item.url.contains("://")) item.url = "http://" + item.url;
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(item.url));
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
});
}
}
public static Intent getOpenFacebookIntent(Context context, String id) {
// try {
// context.getPackageManager().getPackageInfo("com.facebook.katana", 0);
// return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format("fb://event/%s/", id)));
// } catch (Exception e) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format("https://www.facebook.com/events/%s/", id)));
// }
}
public void onDirectionsClick(String name, double latitude, double longitude) {
Intent intent = new Intent(this, ActivityLocationMap.class);
intent.putExtra("name", name);
intent.putExtra("lat", latitude);
intent.putExtra("lng", longitude);
startActivity(intent);
}
public void changeAttendingStatus(final Button button, final String id) {
final CharSequence[] choiceList = {getResources().getString(R.string.attending),
getResources().getString(R.string.maybe),
getResources().getString(R.string.declined)
};
new AlertDialog.Builder(this)
.setTitle("Status")
.setCancelable(true)
.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
updateStatus(button, id);
}
})
.setNegativeButton("Cancel", null)
.setSingleChoiceItems(choiceList, selected, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
selected = which;
}
})
.create().show();
}
public void updateStatus(Button button, String id) {
Session session = Session.getActiveSession();
List<String> permissions = session.getPermissions();
if (!permissions.contains("rsvp_event")) {
List<String> newPermissions = Arrays.asList("rsvp_event");
session.requestNewPublishPermissions(
new Session.NewPermissionsRequest(this, newPermissions)
.setDefaultAudience(SessionDefaultAudience.FRIENDS)
.setCallback(new ActivityItemActivity.SessionStatusCallback()));
} else {
new AsyncComingSetter(this, id, button, AttendingStatus.values()[selected]).execute();
}
}
@Override
public void onResume() {
super.onResume();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onResume();
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onActivityResult(requestCode, resultCode, data);
}
}
@Override
public void onPause() {
super.onPause();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onPause();
}
}
@Override
public void onDestroy() {
super.onDestroy();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onDestroy();
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onSaveInstanceState(outState);
}
}
}
| android/src/be/ugent/zeus/hydra/ActivityItemActivity.java | /**
*
* @author Tom Naessens [email protected] 3de Bachelor Informatica
* Universiteit Gent
*
*/
package be.ugent.zeus.hydra;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.text.Html;
import android.text.method.LinkMovementMethod;
import android.text.util.Linkify;
import android.util.Log;
import android.view.View;
import android.view.ViewManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import be.ugent.zeus.hydra.data.Activity;
import be.ugent.zeus.hydra.util.facebook.event.data.AttendingStatus;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncComingGetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncComingSetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncFriendsGetter;
import be.ugent.zeus.hydra.util.facebook.event.tasks.AsyncInfoGetter;
import com.facebook.Session;
import com.facebook.SessionDefaultAudience;
import com.facebook.SessionState;
import com.facebook.UiLifecycleHelper;
import com.google.analytics.tracking.android.EasyTracker;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.List;
public class ActivityItemActivity extends AbstractSherlockActivity {
private int selected;
private UiLifecycleHelper uiHelper;
private Activity item;
private boolean fetched;
public class SessionStatusCallback implements Session.StatusCallback {
@Override
public void call(Session session, SessionState state, Exception exception) {
onSessionStateChange(session, state, exception);
}
}
private void onSessionStateChange(Session session, SessionState state, Exception exception) {
Log.i("FACEBOOK", "Onsessionstatechange: " + state.toString());
TextView guests = (TextView) findViewById(R.id.activity_item_guests);
ImageView image = (ImageView) findViewById(R.id.activity_item_image);
ImageView[] guestIcons = new ImageView[5];
guestIcons[0] = (ImageView) findViewById(R.id.activity_item_friends1);
guestIcons[1] = (ImageView) findViewById(R.id.activity_item_friends2);
guestIcons[2] = (ImageView) findViewById(R.id.activity_item_friends3);
guestIcons[3] = (ImageView) findViewById(R.id.activity_item_friends4);
guestIcons[4] = (ImageView) findViewById(R.id.activity_item_friends5);
Button button = (Button) findViewById(R.id.activity_item_button);
switch (state) {
case OPENED_TOKEN_UPDATED:
if (selected != -1) {
new AsyncComingSetter(this, item.facebook_id, button, AttendingStatus.values()[selected]).execute();
}
if (fetched) {
return;
}
case OPENED:
if (!fetched) {
fetched = true;
new AsyncInfoGetter(item.facebook_id, guests, image).execute();
new AsyncComingGetter(this, item.facebook_id, button).execute();
new AsyncFriendsGetter(item.facebook_id, guests, guestIcons).execute();
}
return;
case CLOSED:
case CREATED:
new AsyncInfoGetter(item.facebook_id, guests, image).execute();
for (ImageView imageView : guestIcons) {
imageView.setVisibility(View.GONE);
}
button.setVisibility(View.GONE);
return;
}
}
/**
* Called when the activity is first created.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setTitle(R.string.details);
setContentView(R.layout.activity_item);
selected = -1;
fetched = false;
/**
* Get the activity
*/
item = (Activity) getIntent().getSerializableExtra("item");
EasyTracker.getInstance().setContext(this);
EasyTracker.getTracker().sendView("Activity > " + item.title);
/**
* Facebook
*/
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
SessionStatusCallback statusCallback = new ActivityItemActivity.SessionStatusCallback();
uiHelper = new UiLifecycleHelper(this, statusCallback);
uiHelper.onCreate(icicle);
Session session = Session.getActiveSession();
if (session == null || !session.isOpened()) {
if (icicle != null) {
session = Session.restoreSession(this, null, statusCallback, icicle);
}
if (session == null) {
session = new Session(this);
}
if (session.getState().equals(SessionState.CREATED_TOKEN_LOADED)) {
session.openForRead(new Session.OpenRequest(this).setCallback(statusCallback));
} else if (session.getState().equals(SessionState.CREATED)) {
Session.openActiveSession(this, false, statusCallback);
}
onSessionStateChange(session, session.getState(), null);
} else if (session != null
&& (session.isOpened() || session.isClosed())) {
onSessionStateChange(session, session.getState(), null);
}
}
/**
* Image
*/
ImageView image = (ImageView) findViewById(R.id.activity_item_image);
image.setVisibility(View.INVISIBLE);
/**
* Title
*/
TextView title = (TextView) findViewById(R.id.activity_item_title);
title.setText(item.title);
/**
* Button
*/
final Button button = (Button) findViewById(R.id.activity_item_button);
if (item.facebook_id == null) {
button.setVisibility(View.GONE);
} else {
button.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
changeAttendingStatus(button, item.facebook_id);
}
});
}
/**
* Date
*/
TextView date = (TextView) findViewById(R.id.activity_item_date);
String datum =
new SimpleDateFormat("EEE dd MMMM", Hydra.LOCALE).format(item.startDate);
String start =
new SimpleDateFormat("HH:mm", Hydra.LOCALE).format(item.startDate);
String eind =
new SimpleDateFormat("HH:mm", Hydra.LOCALE).format(item.endDate);
date.setText(
String.format(getResources().getString(R.string.activity_item_time_location),
datum, start, eind));
/**
* Association
*/
TextView association = (TextView) findViewById(R.id.activity_item_association);
String poster = item.association.display_name;
if (item.association.full_name != null) {
poster += " (" + item.association.full_name + ")";
}
association.setText(
String.format(getResources().getString(R.string.activity_item_association_title), poster));
/**
* Location
*/
TextView location = (TextView) findViewById(R.id.activity_item_location);
View locationContainerSideBorder = (View) findViewById(R.id.activity_item_location_sideborder);
if (item.location == null || "".equals(item.location)) {
LinearLayout locationContainer = (LinearLayout) findViewById(R.id.activity_item_location_container);
View locationContainerBottomBorder = (View) findViewById(R.id.activity_item_location_bottomborder);
((ViewManager) locationContainer.getParent()).removeView(locationContainer);
((ViewManager) locationContainerBottomBorder.getParent()).removeView(locationContainerBottomBorder);
((ViewManager) locationContainerSideBorder.getParent()).removeView(locationContainerSideBorder);
} else {
location.setText(item.location);
ImageView directions = (ImageView) findViewById(R.id.activity_item_directions);
if (item.latitude != 0 && item.longitude != 0) {
directions.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
onDirectionsClick(item.location, item.latitude, item.longitude);
}
});
} else {
locationContainerSideBorder.setVisibility(View.INVISIBLE);
directions.setVisibility(View.INVISIBLE);
}
}
/**
* Facebook friends
*/
LinearLayout guestsContainer = (LinearLayout) findViewById(R.id.activity_item_guests_container);
View guestsBottomBorder = (View) findViewById(R.id.activity_item_guests_bottomborder);
ImageView external = (ImageView) findViewById(R.id.activity_item_facebook_external);
if (item.facebook_id == null || "".equals(item.facebook_id)) {
((ViewManager) guestsContainer.getParent()).removeView(guestsContainer);
((ViewManager) button.getParent()).removeView(button);
((ViewManager) guestsBottomBorder.getParent()).removeView(guestsBottomBorder);
} else {
final Context context = this;
external.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
Intent intent = getOpenFacebookIntent(context, item.facebook_id);
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
});
}
/**
* Content
*/
View contentBottomBorder = (View) findViewById(R.id.activity_item_content_bottomborder);
TextView content = (TextView) findViewById(R.id.activity_item_content);
if (item.description == null || "".equals(item.description)) {
((ViewManager) content.getParent()).removeView(content);
((ViewManager) contentBottomBorder.getParent()).removeView(contentBottomBorder);
} else {
content.setText(Html.fromHtml(item.description.replace("\n", "<br>")));
content.setMovementMethod(LinkMovementMethod.getInstance());
Linkify.addLinks(content, Linkify.ALL);
}
/**
* More content
*/
LinearLayout moreContentContainer = (LinearLayout) findViewById(R.id.activity_item_more_content_container);
TextView moreContent = (TextView) findViewById(R.id.activity_item_more_content);
if (item.url == null || "".equals(item.url)) {
((ViewManager) moreContentContainer.getParent()).removeView(moreContentContainer);
} else {
moreContent.setText(item.url);
moreContentContainer.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
if (!item.url.contains("://"))
item.url = "http://" + item.url;
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(item.url));
intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
startActivity(intent);
}
});
}
}
public static Intent getOpenFacebookIntent(Context context, String id) {
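// Added note: the commented-out block below would first check for the native Facebook
// app ("com.facebook.katana") and open the event via an fb:// URI, falling back to the
// mobile web URL only when that lookup fails.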
// try {
// context.getPackageManager().getPackageInfo("com.facebook.katana", 0);
// return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format("fb://event/%s/", id)));
// } catch (Exception e) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format("https://www.facebook.com/events/%s/", id)));
// }
}
public void onDirectionsClick(String name, double latitude, double longitude) {
Intent intent = new Intent(this, ActivityLocationMap.class);
intent.putExtra("name", name);
intent.putExtra("lat", latitude);
intent.putExtra("lng", longitude);
startActivity(intent);
}
public void changeAttendingStatus(final Button button, final String id) {
final CharSequence[] choiceList = {getResources().getString(R.string.attending),
getResources().getString(R.string.maybe),
getResources().getString(R.string.declined)
};
new AlertDialog.Builder(this)
.setTitle("Status")
.setCancelable(true)
.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
updateStatus(button, id);
}
})
.setNegativeButton("Cancel", null)
.setSingleChoiceItems(choiceList, selected, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
selected = which;
}
})
.create().show();
}
public void updateStatus(Button button, String id) {
Session session = Session.getActiveSession();
List<String> permissions = session.getPermissions();
if (!permissions.contains("rsvp_event")) {
List<String> newPermissions = Arrays.asList("rsvp_event");
session.requestNewPublishPermissions(
new Session.NewPermissionsRequest(this, newPermissions)
.setDefaultAudience(SessionDefaultAudience.FRIENDS)
.setCallback(new ActivityItemActivity.SessionStatusCallback()));
} else {
new AsyncComingSetter(this, id, button, AttendingStatus.values()[selected]).execute();
}
}
@Override
public void onResume() {
super.onResume();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onResume();
}
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onActivityResult(requestCode, resultCode, data);
}
}
@Override
public void onPause() {
super.onPause();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onPause();
}
}
@Override
public void onDestroy() {
super.onDestroy();
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onDestroy();
}
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (item.facebook_id != null && !"".equals(item.facebook_id)) {
uiHelper.onSaveInstanceState(outState);
}
}
}
| Style fix according messed up Android style
| android/src/be/ugent/zeus/hydra/ActivityItemActivity.java | Style fix according messed up Android style | <ide><path>ndroid/src/be/ugent/zeus/hydra/ActivityItemActivity.java
<ide> moreContent.setText(item.url);
<ide> moreContentContainer.setOnClickListener(new View.OnClickListener() {
<ide> public void onClick(View v) {
<del> if (!item.url.contains("://"))
<del> item.url = "http://" + item.url;
<add> if (!item.url.contains("://")) item.url = "http://" + item.url;
<ide> Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(item.url));
<ide> intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
<ide> startActivity(intent); |
|
Java | lgpl-2.1 | bd2000875c4c681b18cd2bdcab436a959b3e6d46 | 0 | MariaDB/mariadb-connector-j,MariaDB/mariadb-connector-j | package org.mariadb.jdbc;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import java.sql.*;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class DateTest extends BaseTest {
/**
* Initialization.
* @throws SQLException exception
*/
@BeforeClass()
public static void initClass() throws SQLException {
createTable("dtest", "d date");
createTable("date_test2", "id int not null primary key auto_increment, d_from datetime ,d_to datetime");
createTable("timetest", "t time");
createTable("timetest2", "t time");
createTable("timestampzerotest", "ts timestamp, dt datetime, dd date");
createTable("dtest", "d datetime");
createTable("dtest2", "d date");
createTable("dtest3", "d date");
createTable("dtest4", "d time");
createTable("date_test3", " x date");
createTable("date_test4", "x date");
}
@Test
public void dateTestLegacy() throws SQLException {
dateTest(true);
}
@Test
public void dateTestWithoutLegacy() throws SQLException {
dateTest(false);
}
/**
* Date testing.
* @param useLegacy TRUE to use the legacy client-side timezone code, FALSE to use the server-side timezone.
* @throws SQLException exception
*/
public void dateTest(boolean useLegacy) throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=" + useLegacy
+ "&serverTimezone=+5:00&maximizeMysqlCompatibility=false&useServerPrepStmts=true");
setSessionTimeZone(connection, "+5:00");
createTable("date_test", "id int not null primary key auto_increment, d_test date,dt_test datetime, "
+ "t_test time");
Statement stmt = connection.createStatement();
java.sql.Date date = java.sql.Date.valueOf("2009-01-17");
Timestamp timestamp = Timestamp.valueOf("2009-01-17 15:41:01");
Time time = Time.valueOf("23:59:59");
PreparedStatement ps = connection.prepareStatement("insert into date_test (d_test, dt_test, t_test) "
+ "values (?,?,?)");
ps.setDate(1, date);
ps.setTimestamp(2, timestamp);
ps.setTime(3, time);
ps.executeUpdate();
ResultSet rs = stmt.executeQuery("select d_test, dt_test, t_test from date_test");
assertEquals(true, rs.next());
java.sql.Date date2 = rs.getDate(1);
java.sql.Date date3 = rs.getDate("d_test");
Time time2 = rs.getTime(3);
assertEquals(date.toString(), date2.toString());
assertEquals(date.toString(), date3.toString());
assertEquals(time.toString(), time2.toString());
Time time3 = rs.getTime("t_test");
assertEquals(time.toString(), time3.toString());
Timestamp timestamp2 = rs.getTimestamp(2);
assertEquals(timestamp.toString(), timestamp2.toString());
Timestamp timestamp3 = rs.getTimestamp("dt_test");
assertEquals(timestamp.toString(), timestamp3.toString());
} finally {
connection.close();
}
}
@Test
public void dateRangeTest() throws SQLException {
PreparedStatement ps = sharedConnection.prepareStatement("insert into date_test2 (id, d_from, d_to) values "
+ "(1, ?,?)");
Timestamp timestamp1 = Timestamp.valueOf("2009-01-17 15:41:01");
Timestamp timestamp2 = Timestamp.valueOf("2015-01-17 15:41:01");
ps.setTimestamp(1, timestamp1);
ps.setTimestamp(2, timestamp2);
ps.executeUpdate();
PreparedStatement ps1 = sharedConnection.prepareStatement("select d_from, d_to from date_test2 "
+ "where d_from <= ? and d_to >= ?");
Timestamp timestamp3 = Timestamp.valueOf("2014-01-17 15:41:01");
ps1.setTimestamp(1, timestamp3);
ps1.setTimestamp(2, timestamp3);
ResultSet rs = ps1.executeQuery();
assertEquals(true, rs.next());
Timestamp ts1 = rs.getTimestamp(1);
Timestamp ts2 = rs.getTimestamp(2);
assertEquals(ts1.toString(), timestamp1.toString());
assertEquals(ts2.toString(), timestamp2.toString());
}
@Test(expected = SQLException.class)
public void dateTest2() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 1");
rs.next();
rs.getDate(1);
}
@Test(expected = SQLException.class)
public void dateTest3() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 1 as a");
rs.next();
rs.getDate("a");
}
@Test(expected = SQLException.class)
public void timeTest3() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 'aaa' as a");
rs.next();
rs.getTimestamp("a");
}
@Test
public void yearTest() throws SQLException {
Assume.assumeTrue(isMariadbServer());
createTable("yeartest", "y1 year, y2 year(2)");
sharedConnection.createStatement().execute("insert into yeartest values (null, null), (1901, 70), (0, 0), "
+ "(2155, 69)");
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select * from yeartest");
Date[] data1 = new Date[]{null, Date.valueOf("1901-01-01"), Date.valueOf("0000-01-01"),
Date.valueOf("2155-01-01")};
Date[] data2 = new Date[]{null, Date.valueOf("1970-01-01"), Date.valueOf("2000-01-01"),
Date.valueOf("2069-01-01")};
int count = 0;
while (rs.next()) {
assertEquals(data1[count], rs.getObject(1));
assertEquals(data2[count], rs.getObject(2));
count++;
}
}
@Test
public void timeTestLegacy() throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=true&serverTimezone=+05:00");
setSessionTimeZone(connection, "+05:00");
connection.createStatement().execute("insert into timetest values (null), ('-838:59:59'), ('00:00:00'), "
+ "('838:59:59')");
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timetest");
Time[] data = new Time[]{null, Time.valueOf("-838:59:59"), Time.valueOf("00:00:00"),
Time.valueOf("838:59:59")};
int count = 0;
while (rs.next()) {
Time t1 = data[count];
Time t2 = (Time) rs.getObject(1);
assertEquals(t1, t2);
count++;
}
rs.close();
rs = stmt.executeQuery("select '11:11:11'");
rs.next();
Calendar cal = Calendar.getInstance();
assertEquals(rs.getTime(1, cal).toString(), "11:11:11");
} finally {
connection.close();
}
}
@Test
public void timeTest() throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=false&serverTimezone=+5:00");
setSessionTimeZone(connection, "+5:00");
connection.createStatement().execute("insert into timetest2 values (null), ('00:00:00'), ('23:59:59')");
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timetest2");
Time[] data = new Time[]{null, Time.valueOf("00:00:00"), Time.valueOf("23:59:59")};
int count = 0;
while (rs.next()) {
Time t1 = data[count];
Time t2 = (Time) rs.getObject(1);
assertEquals(t1, t2);
count++;
}
rs.close();
rs = stmt.executeQuery("select '11:11:11'");
rs.next();
Calendar cal = Calendar.getInstance();
assertEquals(rs.getTime(1, cal).toString(), "11:11:11");
} finally {
if (connection != null) {
connection.close();
}
}
}
@Test
public void timestampZeroTest() throws SQLException {
assertTrue(isMariadbServer());
String timestampZero = "0000-00-00 00:00:00";
String dateZero = "0000-00-00";
sharedConnection.createStatement().execute("insert into timestampzerotest values ('"
+ timestampZero + "', '" + timestampZero + "', '" + dateZero + "')");
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timestampzerotest");
Timestamp ts = null;
Timestamp datetime = null;
Date date = null;
while (rs.next()) {
assertEquals(null, rs.getObject(1));
ts = rs.getTimestamp(1);
assertEquals(rs.wasNull(), true);
datetime = rs.getTimestamp(2);
assertEquals(rs.wasNull(), true);
date = rs.getDate(3);
assertEquals(rs.wasNull(), true);
}
rs.close();
assertEquals(ts, null);
assertEquals(datetime, null);
assertEquals(date, null);
}
@Test
public void javaUtilDateInPreparedStatementAsTimeStamp() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest values(?)");
ps.setObject(1, currentDate, Types.TIMESTAMP);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest");
rs.next();
/* Check that time is correct, up to seconds precision */
Assert.assertTrue(Math.abs((currentDate.getTime() - rs.getTimestamp(1).getTime())) <= 1000);
}
@Test
public void nullTimestampTest() throws SQLException {
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest2 values(null)");
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest2 where d is null");
rs.next();
Calendar cal = new GregorianCalendar();
assertEquals(null, rs.getTimestamp(1, cal));
}
@SuppressWarnings("deprecation")
@Test
public void javaUtilDateInPreparedStatementAsDate() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest3 values(?)");
ps.setObject(1, currentDate, Types.DATE);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest3");
rs.next();
/* Check that time is correct, up to seconds precision */
assertEquals(currentDate.getYear(), rs.getDate(1).getYear());
assertEquals(currentDate.getMonth(), rs.getDate(1).getMonth());
assertEquals(currentDate.getDay(), rs.getDate(1).getDay());
}
@SuppressWarnings("deprecation")
@Test
public void javaUtilDateInPreparedStatementAsTime() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest4 values(?)");
ps.setObject(1, currentDate, Types.TIME);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest4");
rs.next();
assertEquals(currentDate.getHours(), rs.getTime(1).getHours());
/* Check that time is correct, up to seconds precision */
if (isMariadbServer()) {
assertEquals(currentDate.getMinutes(), rs.getTime(1).getMinutes());
assertEquals(currentDate.getSeconds(), rs.getTime(1).getSeconds());
} else {
//MySQL has only 1-second precision
Assert.assertTrue(Math.abs(currentDate.getMinutes() - rs.getTime(1).getMinutes()) <= 1);
Assert.assertTrue(Math.abs(currentDate.getSeconds() - rs.getTime(1).getSeconds()) <= 1);
}
}
@Test
public void serverTimezone() throws Exception {
TimeZone tz = TimeZone.getDefault();
Connection connection = null;
try {
connection = setConnection("&serverTimezone=+5:00");
setSessionTimeZone(connection, "+5:00");
java.util.Date now = new java.util.Date();
TimeZone canadaTimeZone = TimeZone.getTimeZone("GMT+5:00");
long clientOffset = tz.getOffset(now.getTime());
long serverOffser = canadaTimeZone.getOffset(System.currentTimeMillis());
long totalOffset = serverOffser - clientOffset;
PreparedStatement ps = connection.prepareStatement("select now()");
ResultSet rs = ps.executeQuery();
rs.next();
java.sql.Timestamp ts = rs.getTimestamp(1);
long differenceToServer = ts.getTime() - now.getTime();
long diff = Math.abs(differenceToServer - totalOffset);
log.trace("diff : " + diff);
/* the query takes less than a second, but take the server/client clock difference into account ... */
assertTrue(diff < 5000);
ps = connection.prepareStatement("select utc_timestamp(), ?");
ps.setObject(1, now);
rs = ps.executeQuery();
rs.next();
ts = rs.getTimestamp(1);
java.sql.Timestamp ts2 = rs.getTimestamp(2);
long diff2 = Math.abs(ts.getTime() - ts2.getTime()) - clientOffset;
assertTrue(diff2 < 5000); /* the query takes less than a second */
} finally {
connection.close();
}
}
/**
* Conj-107.
*
* @throws SQLException exception
*/
@Test
public void timestampMillisecondsTest() throws SQLException {
Statement statement = sharedConnection.createStatement();
boolean isMariadbServer = isMariadbServer();
if (isMariadbServer) {
createTable("tt", "id decimal(10), create_time datetime(6)");
statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22.123456')");
} else {
createTable("tt", "id decimal(10), create_time datetime");
statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22')");
}
PreparedStatement ps = sharedConnection.prepareStatement("insert into tt (id, create_time) values (?,?)");
ps.setInt(1, 2);
Timestamp writeTs = new Timestamp(1273017612999L);
Timestamp writeTsWithoutMilliSec = new Timestamp(1273017612999L);
ps.setTimestamp(2, writeTs);
ps.execute();
ResultSet rs = statement.executeQuery("SELECT * FROM tt");
assertTrue(rs.next());
if (isMariadbServer) {
assertTrue("2013-07-18 13:44:22.123456".equals(rs.getString(2)));
} else {
assertTrue("2013-07-18 13:44:22".equals(rs.getString(2)));
}
assertTrue(rs.next());
Timestamp readTs = rs.getTimestamp(2);
if (isMariadbServer) {
assertEquals(writeTs, readTs);
} else {
assertEquals(writeTs, writeTsWithoutMilliSec);
}
}
@Test
public void dateTestWhenServerDifference() throws Throwable {
Connection connection = null;
try {
connection = setConnection("&serverTimezone=UTC");
PreparedStatement pst = connection.prepareStatement("insert into date_test3 values (?)");
java.sql.Date date = java.sql.Date.valueOf("2013-02-01");
pst.setDate(1, date);
pst.execute();
pst = connection.prepareStatement("select x from date_test3 WHERE x = ?");
pst.setDate(1, date);
ResultSet rs = pst.executeQuery();
rs.next();
Date dd = rs.getDate(1);
assertEquals(dd, date);
} finally {
connection.close();
}
}
@Test
public void dateTestWhenServerDifferenceClient() throws Throwable {
Connection connection = null;
try {
connection = setConnection("&serverTimezone=UTC");
PreparedStatement pst = connection.prepareStatement("/*CLIENT*/insert into date_test4 values (?)");
java.sql.Date date = java.sql.Date.valueOf("2013-02-01");
pst.setDate(1, date);
pst.execute();
pst = connection.prepareStatement("/*CLIENT*/ select x from date_test4 WHERE x = ?");
pst.setDate(1, date);
ResultSet rs = pst.executeQuery();
rs.next();
Date dd = rs.getDate(1);
assertEquals(dd, date);
} finally {
connection.close();
}
}
}
| src/test/java/org/mariadb/jdbc/DateTest.java | package org.mariadb.jdbc;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import java.sql.*;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class DateTest extends BaseTest {
/**
* Initialization.
* @throws SQLException exception
*/
@BeforeClass()
public static void initClass() throws SQLException {
createTable("dtest", "d date");
createTable("date_test2", "id int not null primary key auto_increment, d_from datetime ,d_to datetime");
createTable("timetest", "t time");
createTable("timetest2", "t time");
createTable("timestampzerotest", "ts timestamp, dt datetime, dd date");
createTable("dtest", "d datetime");
createTable("dtest2", "d date");
createTable("dtest3", "d date");
createTable("dtest4", "d time");
createTable("date_test3", " x date");
createTable("date_test4", "x date");
}
@Test
public void dateTestLegacy() throws SQLException {
dateTest(true);
}
@Test
public void dateTestWithoutLegacy() throws SQLException {
dateTest(false);
}
/**
* Date testing.
* @param useLegacy TRUE to use the legacy client-side timezone code, FALSE to use the server-side timezone.
* @throws SQLException exception
*/
public void dateTest(boolean useLegacy) throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=" + useLegacy
+ "&serverTimezone=+5:00&maximizeMysqlCompatibility=false&useServerPrepStmts=true");
setSessionTimeZone(connection, "+5:00");
createTable("date_test", "id int not null primary key auto_increment, d_test date,dt_test datetime, "
+ "t_test time");
Statement stmt = connection.createStatement();
java.sql.Date date = java.sql.Date.valueOf("2009-01-17");
Timestamp timestamp = Timestamp.valueOf("2009-01-17 15:41:01");
Time time = Time.valueOf("23:59:59");
PreparedStatement ps = connection.prepareStatement("insert into date_test (d_test, dt_test, t_test) "
+ "values (?,?,?)");
ps.setDate(1, date);
ps.setTimestamp(2, timestamp);
ps.setTime(3, time);
ps.executeUpdate();
ResultSet rs = stmt.executeQuery("select d_test, dt_test, t_test from date_test");
assertEquals(true, rs.next());
java.sql.Date date2 = rs.getDate(1);
java.sql.Date date3 = rs.getDate("d_test");
Time time2 = rs.getTime(3);
assertEquals(date.toString(), date2.toString());
assertEquals(date.toString(), date3.toString());
assertEquals(time.toString(), time2.toString());
Time time3 = rs.getTime("t_test");
assertEquals(time.toString(), time3.toString());
Timestamp timestamp2 = rs.getTimestamp(2);
assertEquals(timestamp.toString(), timestamp2.toString());
Timestamp timestamp3 = rs.getTimestamp("dt_test");
assertEquals(timestamp.toString(), timestamp3.toString());
} finally {
connection.close();
}
}
@Test
public void dateRangeTest() throws SQLException {
PreparedStatement ps = sharedConnection.prepareStatement("insert into date_test2 (id, d_from, d_to) values "
+ "(1, ?,?)");
Timestamp timestamp1 = Timestamp.valueOf("2009-01-17 15:41:01");
Timestamp timestamp2 = Timestamp.valueOf("2015-01-17 15:41:01");
ps.setTimestamp(1, timestamp1);
ps.setTimestamp(2, timestamp2);
ps.executeUpdate();
PreparedStatement ps1 = sharedConnection.prepareStatement("select d_from, d_to from date_test2 "
+ "where d_from <= ? and d_to >= ?");
Timestamp timestamp3 = Timestamp.valueOf("2014-01-17 15:41:01");
ps1.setTimestamp(1, timestamp3);
ps1.setTimestamp(2, timestamp3);
ResultSet rs = ps1.executeQuery();
assertEquals(true, rs.next());
Timestamp ts1 = rs.getTimestamp(1);
Timestamp ts2 = rs.getTimestamp(2);
assertEquals(ts1.toString(), timestamp1.toString());
assertEquals(ts2.toString(), timestamp2.toString());
}
@Test(expected = SQLException.class)
public void dateTest2() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 1");
rs.next();
rs.getDate(1);
}
@Test(expected = SQLException.class)
public void dateTest3() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 1 as a");
rs.next();
rs.getDate("a");
}
@Test(expected = SQLException.class)
public void timeTest3() throws SQLException {
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select 'aaa' as a");
rs.next();
rs.getTimestamp("a");
}
@Test
public void yearTest() throws SQLException {
Assume.assumeTrue(isMariadbServer());
createTable("yeartest", "y1 year, y2 year(2)");
sharedConnection.createStatement().execute("insert into yeartest values (null, null), (1901, 70), (0, 0), "
+ "(2155, 69)");
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select * from yeartest");
Date[] data1 = new Date[]{null, Date.valueOf("1901-01-01"), Date.valueOf("0000-01-01"),
Date.valueOf("2155-01-01")};
Date[] data2 = new Date[]{null, Date.valueOf("1970-01-01"), Date.valueOf("2000-01-01"),
Date.valueOf("2069-01-01")};
int count = 0;
while (rs.next()) {
assertEquals(data1[count], rs.getObject(1));
assertEquals(data2[count], rs.getObject(2));
count++;
}
}
@Test
public void timeTestLegacy() throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=true&serverTimezone=+05:00");
setSessionTimeZone(connection, "+05:00");
connection.createStatement().execute("insert into timetest values (null), ('-838:59:59'), ('00:00:00'), "
+ "('838:59:59')");
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timetest");
Time[] data = new Time[]{null, Time.valueOf("-838:59:59"), Time.valueOf("00:00:00"),
Time.valueOf("838:59:59")};
int count = 0;
while (rs.next()) {
Time t1 = data[count];
Time t2 = (Time) rs.getObject(1);
assertEquals(t1, t2);
count++;
}
rs.close();
rs = stmt.executeQuery("select '11:11:11'");
rs.next();
Calendar cal = Calendar.getInstance();
assertEquals(rs.getTime(1, cal).toString(), "11:11:11");
} finally {
connection.close();
}
}
@Test
public void timeTest() throws SQLException {
Connection connection = null;
try {
connection = setConnection("&useLegacyDatetimeCode=false&serverTimezone=+5:00");
setSessionTimeZone(connection, "+5:00");
connection.createStatement().execute("insert into timetest2 values (null), ('00:00:00'), ('23:59:59')");
Statement stmt = connection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timetest2");
Time[] data = new Time[]{null, Time.valueOf("00:00:00"), Time.valueOf("23:59:59")};
int count = 0;
while (rs.next()) {
Time t1 = data[count];
Time t2 = (Time) rs.getObject(1);
assertEquals(t1, t2);
count++;
}
rs.close();
rs = stmt.executeQuery("select '11:11:11'");
rs.next();
Calendar cal = Calendar.getInstance();
assertEquals(rs.getTime(1, cal).toString(), "11:11:11");
} finally {
if (connection != null) {
connection.close();
}
}
}
@Test
public void timestampZeroTest() throws SQLException {
String timestampZero = "0000-00-00 00:00:00";
String dateZero = "0000-00-00";
sharedConnection.createStatement().execute("insert into timestampzerotest values ('"
+ timestampZero + "', '" + timestampZero + "', '" + dateZero + "')");
Statement stmt = sharedConnection.createStatement();
ResultSet rs = stmt.executeQuery("select * from timestampzerotest");
Timestamp ts = null;
Timestamp datetime = null;
Date date = null;
while (rs.next()) {
assertEquals(null, rs.getObject(1));
ts = rs.getTimestamp(1);
assertEquals(rs.wasNull(), true);
datetime = rs.getTimestamp(2);
assertEquals(rs.wasNull(), true);
date = rs.getDate(3);
assertEquals(rs.wasNull(), true);
}
rs.close();
assertEquals(ts, null);
assertEquals(datetime, null);
assertEquals(date, null);
}
@Test
public void javaUtilDateInPreparedStatementAsTimeStamp() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest values(?)");
ps.setObject(1, currentDate, Types.TIMESTAMP);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest");
rs.next();
/* Check that time is correct, up to seconds precision */
Assert.assertTrue(Math.abs((currentDate.getTime() - rs.getTimestamp(1).getTime())) <= 1000);
}
@Test
public void nullTimestampTest() throws SQLException {
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest2 values(null)");
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest2 where d is null");
rs.next();
Calendar cal = new GregorianCalendar();
assertEquals(null, rs.getTimestamp(1, cal));
}
@SuppressWarnings("deprecation")
@Test
public void javaUtilDateInPreparedStatementAsDate() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest3 values(?)");
ps.setObject(1, currentDate, Types.DATE);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest3");
rs.next();
/* Check that time is correct, up to seconds precision */
assertEquals(currentDate.getYear(), rs.getDate(1).getYear());
assertEquals(currentDate.getMonth(), rs.getDate(1).getMonth());
assertEquals(currentDate.getDay(), rs.getDate(1).getDay());
}
@SuppressWarnings("deprecation")
@Test
public void javaUtilDateInPreparedStatementAsTime() throws Exception {
java.util.Date currentDate = Calendar.getInstance(TimeZone.getDefault()).getTime();
PreparedStatement ps = sharedConnection.prepareStatement("insert into dtest4 values(?)");
ps.setObject(1, currentDate, Types.TIME);
ps.executeUpdate();
ResultSet rs = sharedConnection.createStatement().executeQuery("select * from dtest4");
rs.next();
assertEquals(currentDate.getHours(), rs.getTime(1).getHours());
/* Check that time is correct, up to seconds precision */
if (isMariadbServer()) {
assertEquals(currentDate.getMinutes(), rs.getTime(1).getMinutes());
assertEquals(currentDate.getSeconds(), rs.getTime(1).getSeconds());
} else {
//MySQL has only 1-second precision
Assert.assertTrue(Math.abs(currentDate.getMinutes() - rs.getTime(1).getMinutes()) <= 1);
Assert.assertTrue(Math.abs(currentDate.getSeconds() - rs.getTime(1).getSeconds()) <= 1);
}
}
@Test
public void serverTimezone() throws Exception {
TimeZone tz = TimeZone.getDefault();
Connection connection = null;
try {
connection = setConnection("&serverTimezone=+5:00");
setSessionTimeZone(connection, "+5:00");
java.util.Date now = new java.util.Date();
TimeZone canadaTimeZone = TimeZone.getTimeZone("GMT+5:00");
long clientOffset = tz.getOffset(now.getTime());
long serverOffser = canadaTimeZone.getOffset(System.currentTimeMillis());
long totalOffset = serverOffser - clientOffset;
PreparedStatement ps = connection.prepareStatement("select now()");
ResultSet rs = ps.executeQuery();
rs.next();
java.sql.Timestamp ts = rs.getTimestamp(1);
long differenceToServer = ts.getTime() - now.getTime();
long diff = Math.abs(differenceToServer - totalOffset);
log.trace("diff : " + diff);
/* the query takes less than a second, but take the server/client clock difference into account ... */
assertTrue(diff < 5000);
ps = connection.prepareStatement("select utc_timestamp(), ?");
ps.setObject(1, now);
rs = ps.executeQuery();
rs.next();
ts = rs.getTimestamp(1);
java.sql.Timestamp ts2 = rs.getTimestamp(2);
long diff2 = Math.abs(ts.getTime() - ts2.getTime()) - clientOffset;
assertTrue(diff2 < 5000); /* the query takes less than a second */
} finally {
connection.close();
}
}
/**
* Conj-107.
*
* @throws SQLException exception
*/
@Test
public void timestampMillisecondsTest() throws SQLException {
Statement statement = sharedConnection.createStatement();
boolean isMariadbServer = isMariadbServer();
if (isMariadbServer) {
createTable("tt", "id decimal(10), create_time datetime(6) default 0");
statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22.123456')");
} else {
createTable("tt", "id decimal(10), create_time datetime default 0");
statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22')");
}
PreparedStatement ps = sharedConnection.prepareStatement("insert into tt (id, create_time) values (?,?)");
ps.setInt(1, 2);
Timestamp writeTs = new Timestamp(1273017612999L);
Timestamp writeTsWithoutMilliSec = new Timestamp(1273017612999L);
ps.setTimestamp(2, writeTs);
ps.execute();
ResultSet rs = statement.executeQuery("SELECT * FROM tt");
assertTrue(rs.next());
if (isMariadbServer) {
assertTrue("2013-07-18 13:44:22.123456".equals(rs.getString(2)));
} else {
assertTrue("2013-07-18 13:44:22".equals(rs.getString(2)));
}
assertTrue(rs.next());
Timestamp readTs = rs.getTimestamp(2);
if (isMariadbServer) {
assertEquals(writeTs, readTs);
} else {
assertEquals(writeTs, writeTsWithoutMilliSec);
}
}
@Test
public void dateTestWhenServerDifference() throws Throwable {
Connection connection = null;
try {
connection = setConnection("&serverTimezone=UTC");
PreparedStatement pst = connection.prepareStatement("insert into date_test3 values (?)");
java.sql.Date date = java.sql.Date.valueOf("2013-02-01");
pst.setDate(1, date);
pst.execute();
pst = connection.prepareStatement("select x from date_test3 WHERE x = ?");
pst.setDate(1, date);
ResultSet rs = pst.executeQuery();
rs.next();
Date dd = rs.getDate(1);
assertEquals(dd, date);
} finally {
connection.close();
}
}
@Test
public void dateTestWhenServerDifferenceClient() throws Throwable {
Connection connection = null;
try {
connection = setConnection("&serverTimezone=UTC");
PreparedStatement pst = connection.prepareStatement("/*CLIENT*/insert into date_test4 values (?)");
java.sql.Date date = java.sql.Date.valueOf("2013-02-01");
pst.setDate(1, date);
pst.execute();
pst = connection.prepareStatement("/*CLIENT*/ select x from date_test4 WHERE x = ?");
pst.setDate(1, date);
ResultSet rs = pst.executeQuery();
rs.next();
Date dd = rs.getDate(1);
assertEquals(dd, date);
} finally {
connection.close();
}
}
}
| [test] correcting change timestamp default value '0000-00-00 00:00:00' since now deprecated in MySQL 5.7
| src/test/java/org/mariadb/jdbc/DateTest.java | [test] correcting change timestamp default value '0000-00-00 00:00:00' since now deprecated in MySQL 5.7 | <ide><path>rc/test/java/org/mariadb/jdbc/DateTest.java
<ide>
<ide> @Test
<ide> public void timestampZeroTest() throws SQLException {
<add> assertTrue(isMariadbServer());
<ide> String timestampZero = "0000-00-00 00:00:00";
<ide> String dateZero = "0000-00-00";
<ide> sharedConnection.createStatement().execute("insert into timestampzerotest values ('"
<ide>
<ide> boolean isMariadbServer = isMariadbServer();
<ide> if (isMariadbServer) {
<del> createTable("tt", "id decimal(10), create_time datetime(6) default 0");
<add> createTable("tt", "id decimal(10), create_time datetime(6)");
<ide> statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22.123456')");
<ide> } else {
<del> createTable("tt", "id decimal(10), create_time datetime default 0");
<add> createTable("tt", "id decimal(10), create_time datetime");
<ide> statement.execute("INSERT INTO tt (id, create_time) VALUES (1,'2013-07-18 13:44:22')");
<ide> }
<ide> PreparedStatement ps = sharedConnection.prepareStatement("insert into tt (id, create_time) values (?,?)"); |
|
Java | apache-2.0 | f55d420e6907e0738363e58744d4d130a6ff9e7f | 0 | esoco/esoco-business | //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-business' project.
// Copyright 2016 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.process.step;
import de.esoco.data.DataRelationTypes;
import de.esoco.data.SessionManager;
import de.esoco.data.UploadHandler;
import de.esoco.data.element.DataElementList.ViewDisplayType;
import de.esoco.data.element.SelectionDataElement;
import de.esoco.entity.Entity;
import de.esoco.entity.EntityRelationTypes.HierarchicalQueryMode;
import de.esoco.entity.ExtraAttributes;
import de.esoco.lib.collection.CollectionUtil;
import de.esoco.lib.expression.Predicate;
import de.esoco.lib.expression.function.AbstractAction;
import de.esoco.lib.property.Updatable;
import de.esoco.lib.property.UserInterfaceProperties;
import de.esoco.lib.property.UserInterfaceProperties.ContentType;
import de.esoco.lib.property.UserInterfaceProperties.InteractiveInputMode;
import de.esoco.process.Parameter;
import de.esoco.process.ParameterList;
import de.esoco.process.Process;
import de.esoco.process.ProcessElement;
import de.esoco.process.ProcessException;
import de.esoco.process.ProcessFragment;
import de.esoco.process.ProcessRelationTypes;
import de.esoco.process.ProcessStep;
import de.esoco.process.step.DialogFragment.DialogAction;
import de.esoco.process.step.DialogFragment.DialogActionListener;
import de.esoco.process.step.Interaction.InteractionHandler;
import de.esoco.storage.QueryPredicate;
import de.esoco.storage.StorageException;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.obrel.core.Relation;
import org.obrel.core.RelationType;
import org.obrel.core.RelationTypes;
import org.obrel.type.MetaTypes;
import static de.esoco.entity.EntityPredicates.forEntity;
import static de.esoco.entity.EntityRelationTypes.HIERARCHICAL_QUERY_MODE;
import static de.esoco.lib.property.UserInterfaceProperties.CONTENT_TYPE;
import static de.esoco.lib.property.UserInterfaceProperties.CURRENT_SELECTION;
import static de.esoco.lib.property.UserInterfaceProperties.DISABLED;
import static de.esoco.lib.property.UserInterfaceProperties.URL;
import static de.esoco.process.ProcessRelationTypes.INPUT_PARAMS;
import static de.esoco.process.ProcessRelationTypes.ORIGINAL_RELATION_TYPE;
import static de.esoco.process.ProcessRelationTypes.PARAM_UPDATE_LISTENERS;
import static org.obrel.type.StandardTypes.ERROR_MESSAGE;
/********************************************************************
* A process element subclass that serves as a fragment of an interactive
* process step. This allows to split the user interface of complex interactions
* into different parts that can more easily be re-used.
*
* @author eso
*/
public abstract class InteractionFragment extends ProcessFragment
{
//~ Static fields/initializers ---------------------------------------------
private static final long serialVersionUID = 1L;
/** The resource string for an error message box icon. */
public static final String MESSAGE_BOX_ERROR_ICON = "#imErrorMessage";
/** The resource string for a warning message box icon. */
public static final String MESSAGE_BOX_WARNING_ICON = "#imWarningMessage";
/** The resource string for a question message box icon. */
public static final String MESSAGE_BOX_QUESTION_ICON = "#imQuestionMessage";
/** The resource string for an info message box icon. */
public static final String MESSAGE_BOX_INFO_ICON = "#imInfoMessage";
private static int nNextFragmentId = 0;
//~ Instance fields --------------------------------------------------------
private int nFragmentId = nNextFragmentId++;
private Interaction rProcessStep;
private InteractionFragment rParent;
private RelationType<List<RelationType<?>>> rFragmentParam;
private List<RelationType<?>> aFragmentContinuationParams = null;
private List<RelationType<?>> aInteractionParams = new ArrayList<>();
private Set<RelationType<?>> aInputParams = new HashSet<>();
//~ Constructors -----------------------------------------------------------
/***************************************
* Creates a new instance.
*/
public InteractionFragment()
{
RelationTypes.init(getClass());
}
//~ Methods ----------------------------------------------------------------
/***************************************
* Must be implemented to initialize the interaction parameters of this
* fragment.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public abstract void init() throws Exception;
/***************************************
* Overridden to add parameters that are not already present to the list of interaction
* parameters of this instance as returned by the method {@link
* #getInteractionParameters()}. The returned collection must therefore be
* mutable (as is the case with the default parameter collection).
*
* <p>This implementation replaces the base class implementation which
* changes the interaction parameters of the process step instead of the
* fragment.</p>
*
* @param rParams The interaction parameters to add
*/
@Override
public void addDisplayParameters(
Collection<? extends RelationType<?>> rParams)
{
List<RelationType<?>> rInteractionParams = getInteractionParameters();
for (RelationType<?> rParam : rParams)
{
// do not add parameters that are displayed in panels because they
// are stored in the parameter list of the panel parameter
if (!isPanelParameter(rParam) &&
!rInteractionParams.contains(rParam))
{
rInteractionParams.add(rParam);
}
}
}
/***************************************
* @see #addInputParameters(Collection)
*/
@Override
public void addInputParameters(RelationType<?>... rParams)
{
addInputParameters(Arrays.asList(rParams));
}
/***************************************
* Adds the given parameters to the interaction and input parameters of this
* instance. The input parameters are queried with the method {@link
* #getInputParameters()}, the interaction parameters are updated with
* {@link #addDisplayParameters(Collection)}.
*
* <p>This implementation replaces the base class implementation which
* changes the interaction parameters of the process step instead of the
* fragment.</p>
*
* @param rParams The input parameters to add
*
* @see #addDisplayParameters(Collection)
*/
@Override
public void addInputParameters(
Collection<? extends RelationType<?>> rParams)
{
addDisplayParameters(rParams);
markInputParams(true, rParams);
}
/***************************************
* Convenience method to add a listener to the process step relation with
* the type {@link ProcessRelationTypes#PARAM_UPDATE_LISTENERS}. To remove a
* listener it should be removed from the parameter set directly.
*
* @param rListener The listener to add
*/
public void addParameterUpdateListener(Updatable rListener)
{
get(PARAM_UPDATE_LISTENERS).add(rListener);
}
/***************************************
* A variant of {@link #addSubFragment(String, InteractionFragment)} that
* uses the name of the fragment class for the temporary fragment parameter.
*
* @see #addSubFragment(String, InteractionFragment)
*/
public Parameter<List<RelationType<?>>> addSubFragment(
InteractionFragment rSubFragment)
{
return addSubFragment(rSubFragment.getClass().getSimpleName(),
rSubFragment);
}
/***************************************
* Adds a subordinate fragment to this instance into a temporary parameter
* and directly displays it.
*
* @see #addSubFragment(String, InteractionFragment, boolean)
*/
public Parameter<List<RelationType<?>>> addSubFragment(
String sName,
InteractionFragment rSubFragment)
{
return addSubFragment(sName, rSubFragment, true);
}
/***************************************
* Overridden to set the parent of the sub-fragment to this instance.
*
* @see ProcessFragment#addSubFragment(RelationType, InteractionFragment)
*/
@Override
public void addSubFragment(
RelationType<List<RelationType<?>>> rFragmentParam,
InteractionFragment rSubFragment)
{
rSubFragment.rParent = this;
super.addSubFragment(rFragmentParam, rSubFragment);
}
/***************************************
* Adds a subordinate fragment to this instance into a temporary parameter
* and optionally displays it. The temporary parameter relation type will be
* created with the given name by invoking {@link #listParam(String, Class)}
* and the parameter wrapper will be returned. The fragment will be added by
* invoking {@link #addSubFragment(RelationType, InteractionFragment)}.
* Furthermore the UI property {@link UserInterfaceProperties#HIDE_LABEL}
* will be set on the new fragment parameter because fragments are typically
* displayed without a label.
*
* @param sName The name of the temporary fragment parameter
* @param rSubFragment The fragment to add
* @param bDisplay TRUE to invoke {@link Parameter#display()} on the
* new fragment parameter
*
* @return The wrapper for the fragment parameter
*/
public Parameter<List<RelationType<?>>> addSubFragment(
String sName,
InteractionFragment rSubFragment,
boolean bDisplay)
{
Parameter<List<RelationType<?>>> rSubFragmentParam =
listParam(sName, RelationType.class);
addSubFragment(rSubFragmentParam.type(), rSubFragment);
if (bDisplay)
{
rSubFragmentParam.display();
}
return rSubFragmentParam.hideLabel();
}
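/*
 * Added usage sketch (not part of the original source): in a fragment's init()
 * method a sub-fragment could be attached and displayed like this, assuming a
 * hypothetical fragment class "AddressFragment":
 *
 *     addSubFragment("Address", new AddressFragment());
 *
 * The two-argument variant displays the sub-fragment immediately; pass FALSE to
 * the three-argument variant above to defer the display() call.
 */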
/***************************************
* Internal method that will be invoked to attach this fragment to the given
* process step and fragment parameter.
*
* @param rProcessStep The process step to attach this instance to
* @param rFragmentParam The parameter this fragment will be stored in
*/
public void attach(
Interaction rProcessStep,
RelationType<List<RelationType<?>>> rFragmentParam)
{
this.rFragmentParam = rFragmentParam;
setProcessStep(rProcessStep);
setup();
}
/***************************************
* Can be overridden by subclasses to perform resource cleanups when the
* process ends. The default implementation does nothing.
*
* @see ProcessStep#cleanup()
*/
public void cleanup()
{
}
/***************************************
* Clears lists returned by the methods {@link #getInteractionParameters()}
* and {@link #getInputParameters()}. These lists must therefore be mutable!
*
* <p>This implementation replaces the base class implementation because the
* parent method changes the interaction parameters of the process step.</p>
*/
@Override
public void clearInteractionParameters()
{
getInteractionParameters().clear();
getInputParameters().clear();
}
/***************************************
* Clear the selection of a certain parameter by setting its value to NULL
* and the property {@link UserInterfaceProperties#CURRENT_SELECTION} to -1.
*
* @param rParam The parameter to clear the selection of
*/
public void clearSelection(RelationType<?> rParam)
{
Object rParamValue = getParameter(rParam);
boolean bClearSelection = (rParamValue != null);
if (SelectionDataElement.class.isAssignableFrom(rParam.getTargetType()))
{
SelectionDataElement rElement = (SelectionDataElement) rParamValue;
if (!SelectionDataElement.NO_SELECTION.equals(rElement.getValue()))
{
rElement.setValue(SelectionDataElement.NO_SELECTION);
bClearSelection = true;
}
}
else
{
setParameter(rParam, null);
}
// only clear selection if one exists to prevent unnecessary updates
if (bClearSelection)
{
setUIProperty(-1, CURRENT_SELECTION, rParam);
}
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a {@link Date} datatype.
*
* @see #param(String, Class)
*/
public Parameter<Date> dateParam(String sName)
{
return param(sName, Date.class);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#deleteRelation(Relation)
*/
@Override
public void deleteRelation(Relation<?> rRelation)
{
rProcessStep.deleteRelation(rRelation);
}
/***************************************
* Enables or disables the editing of this fragment and of all it's
* children. This is achieved by clearing or setting the flag property
* {@link UserInterfaceProperties#DISABLED} on the fragment input
* parameters. Subclasses may override this method to implement a more
* specific handling but should normally also call the superclass
* implementation.
*
* @param bEnable TRUE to enable editing, FALSE to disable
*/
public void enableEdit(boolean bEnable)
{
if (bEnable)
{
clearUIFlag(DISABLED, getInputParameters());
}
else
{
setUIFlag(DISABLED, getInputParameters());
}
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.enableEdit(bEnable);
}
}
/***************************************
* Can be overridden by a fragment to execute actions when the process flow
* leaves this fragment.
*
* <p>The default implementation does nothing.</p>
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void finish() throws Exception
{
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a boolean datatype.
*
* @see #param(String, Class)
*/
public Parameter<Boolean> flagParam(String sName)
{
return param(sName, Boolean.class);
}
/***************************************
* Creates a new parameter wrapper for the relation type this fragment is
* stored in.
*
* @return the parameter wrapper for the fragment parameter
*/
public ParameterList fragmentParam()
{
return new ParameterList(rParent != null ? rParent : this,
getFragmentParameter(),
false);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#get(RelationType)
*/
@Override
public <T> T get(RelationType<T> rType)
{
return rProcessStep.get(rType);
}
/***************************************
* Returns the parameter this fragment is displayed in.
*
* @return The fragment parameter
*/
public final RelationType<List<RelationType<?>>> getFragmentParameter()
{
return rFragmentParam;
}
/***************************************
* Returns the collection of input parameters of this fragment. These must
* be a subset of {@link #getInteractionParameters()}. The default
* implementation returns a mutable collection that can be modified
* directly by a subclass. Or it can be overridden by subclasses to return
* their own input parameter collection.
*
* @return The list of this fragment's input parameters
*/
public Collection<RelationType<?>> getInputParameters()
{
return aInputParams;
}
/***************************************
* Returns the list of interaction parameters for this fragment. The default
* implementation returns a mutable list that can be modified directly by
* a subclass. Or it can be overridden by subclasses to return their own
* interaction parameter list.
*
* @return The list of this fragment's interaction parameters
*/
public List<RelationType<?>> getInteractionParameters()
{
return aInteractionParams;
}
/***************************************
* Sets the interaction handler for a certain parameter.
*
* @see Interaction#getParameterInteractionHandler(RelationType)
*/
public InteractionHandler getParameterInteractionHandler(
RelationType<?> rParam)
{
return getProcessStep().getParameterInteractionHandler(rParam);
}
/***************************************
* Returns the parent fragment of this instance.
*
* @return The parent fragment or NULL for a root fragment
*/
public final InteractionFragment getParent()
{
return rParent;
}
/***************************************
* @see ProcessFragment#getProcess()
*/
@Override
public Process getProcess()
{
return rProcessStep.getProcess();
}
/***************************************
* Returns the interactive process step this element is associated with.
*
* @return The process step this fragment belongs to
*/
@Override
public final Interaction getProcessStep()
{
return rProcessStep;
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#getRelation(RelationType)
*/
@Override
public <T> Relation<T> getRelation(RelationType<T> rType)
{
return rProcessStep.getRelation(rType);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#getRelations(Predicate)
*/
@Override
public List<Relation<?>> getRelations(
Predicate<? super Relation<?>> rFilter)
{
return rProcessStep.getRelations(rFilter);
}
/***************************************
* Must be implemented by subclasses to handle interactions for this
* fragment. The default implementation does nothing.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void handleInteraction(RelationType<?> rInteractionParam)
throws Exception
{
}
/***************************************
* Checks whether an interaction has been caused by an interaction parameter
* from this fragment. The default implementation checks if the given
	 * parameter is one of this fragment's input parameters.
*
* @param rInteractionParam The interaction parameter to check
*
* @return TRUE if the interaction was caused by a parameter of this
* fragment
*/
public boolean hasInteraction(RelationType<?> rInteractionParam)
{
return getInputParameters().contains(rInteractionParam);
}
/***************************************
* Initializes a parameter for the display of a storage query.
*
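	 * <p>A usage sketch; the Person entity class, the PERSON_PARAM and
	 * PERSON_NAME relation types and the eQueryMode value are illustrative
	 * placeholders only:</p>
	 *
	 * <pre>{@code
	 * QueryPredicate<Person> qPersons =
	 *     initQueryParameter(PERSON_PARAM,
	 *                        Person.class,
	 *                        null,
	 *                        null,
	 *                        eQueryMode,
	 *                        PERSON_NAME);
	 * }</pre>
	 *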
* @param rParam The parameter to initialize the query for
* @param rEntityClass The entity class to query
	 * @param pCriteria    The query criteria or NULL for none
	 * @param pSortOrder   The sort predicate or NULL for the default order
	 * @param eMode        The hierarchical query mode; a hierarchical query
	 *                     will be displayed as a tree-table
* @param rColumns The columns to display
*
* @return The generated query predicate
*/
public <E extends Entity> QueryPredicate<E> initQueryParameter(
RelationType<E> rParam,
Class<E> rEntityClass,
Predicate<? super E> pCriteria,
Predicate<? super Entity> pSortOrder,
HierarchicalQueryMode eMode,
RelationType<?>... rColumns)
{
QueryPredicate<E> qEntities = forEntity(rEntityClass, pCriteria);
qEntities.set(HIERARCHICAL_QUERY_MODE, eMode);
annotateForEntityQuery(rParam, qEntities, pSortOrder, rColumns);
return qEntities;
}
/***************************************
* @see #insertInputParameters(RelationType, RelationType...)
*/
public void insertInputParameters(
RelationType<?> rBeforeParam,
RelationType<?>... rParams)
{
insertInputParameters(rBeforeParam, Arrays.asList(rParams));
}
/***************************************
* Inserts additional parameters into the lists returned by the methods
* {@link #getInteractionParameters()} and {@link #getInputParameters()}.
* These lists must therefore be mutable!
*
* @param rBeforeParam The parameter to insert the other parameters before
* @param rParams The parameters to add
*/
public void insertInputParameters(
RelationType<?> rBeforeParam,
Collection<RelationType<?>> rParams)
{
CollectionUtil.insert(getInteractionParameters(),
rBeforeParam,
rParams);
getInputParameters().addAll(rParams);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* an integer datatype.
*
* @see #param(String, Class)
*/
public Parameter<Integer> intParam(String sName)
{
return param(sName, Integer.class);
}
/***************************************
	 * Creates a new parameter wrapper for this fragment with a temporary
	 * list relation type.
	 *
	 * @param sName        The name of the relation type
	 * @param rElementType The datatype of the list elements
*
* @return the parameter instance
*/
public <T> Parameter<List<T>> listParam(
String sName,
Class<? super T> rElementType)
{
return param(getTemporaryListType(sName, rElementType));
}
/***************************************
	 * Marks the input parameters of this fragment and all of its
* sub-fragments.
*/
public void markFragmentInputParams()
{
get(INPUT_PARAMS).addAll(getInputParameters());
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.markFragmentInputParams();
}
}
/***************************************
* Overridden to operate on the fragment input parameters.
*
* @see ProcessElement#markInputParams(boolean, Collection)
*/
@Override
public void markInputParams(
boolean bInput,
Collection<? extends RelationType<?>> rParams)
{
Collection<RelationType<?>> rInputParams = getInputParameters();
for (RelationType<?> rParam : rParams)
{
boolean bHasParam = rInputParams.contains(rParam);
if (!bHasParam && bInput)
{
rInputParams.add(rParam);
}
else if (bHasParam && !bInput)
{
rInputParams.remove(rParam);
}
}
super.markInputParams(bInput, rParams);
}
/***************************************
* Marks a hierarchy of parameters as modified.
*
* @param rParams The list of root parameters
*/
public void markParameterHierarchyAsModified(
Collection<RelationType<?>> rParams)
{
for (RelationType<?> rParam : rParams)
{
markParameterAsModified(rParam);
if (Collection.class.isAssignableFrom(rParam.getTargetType()))
{
if (rParam.get(MetaTypes.ELEMENT_DATATYPE) ==
RelationType.class)
{
@SuppressWarnings("unchecked")
Collection<RelationType<?>> rChildParams =
(Collection<RelationType<?>>) getParameter(rParam);
if (rChildParams != null)
{
markParameterHierarchyAsModified(rChildParams);
}
}
}
}
}
/***************************************
* Notifies all listeners for parameter updates that are registered in the
* relation {@link ProcessRelationTypes#PARAM_UPDATE_LISTENERS} of this
* fragment's process step. Because relations are shared between fragments
* this will affect all fragments in the current interaction.
*/
public void notifyParameterUpdateListeners()
{
if (hasRelation(PARAM_UPDATE_LISTENERS))
{
for (Updatable rListener : get(PARAM_UPDATE_LISTENERS))
{
rListener.update();
}
}
}
/***************************************
* Creates a new temporary relation type for a list of relation types and
* returns a parameter wrapper for it.
*
* @param sName The name of the parameter list
*
* @return the parameter wrapper for the parameter list
*/
public ParameterList panel(String sName)
{
RelationType<List<RelationType<?>>> rListType =
getTemporaryListType(sName, RelationType.class);
return new ParameterList(this, rListType, true);
}
/***************************************
* Creates a new parameter wrapper for the given relation type in this
* fragment.
*
* @param rParam The parameter to wrap
*
* @return the parameter wrapper
*/
public <T> Parameter<T> param(RelationType<T> rParam)
{
return new Parameter<>(this, rParam);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* an enum datatype. The parameter will be named with the simple name of the
* enum class.
*
* @see #param(String, Class)
*/
public <E extends Enum<E>> Parameter<E> param(Class<E> rEnumClass)
{
return param(rEnumClass.getSimpleName(), rEnumClass);
}
/***************************************
* Create a new parameter wrapper for this fragment with a temporary
* relation type. If no matching temporary relation type exists already it
* will be created.
*
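	 * <p>A sketch of creating temporary parameters inside a fragment (the
	 * parameter names are only examples):</p>
	 *
	 * <pre>{@code
	 * Parameter<String>  aName  = param("CustomerName", String.class);
	 * Parameter<Integer> aCount = intParam("ItemCount");
	 * }</pre>
	 *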
* @param sName The name of the relation type
* @param rDatatype The parameter datatype
*
* @return the parameter wrapper
*/
public <T> Parameter<T> param(String sName, Class<? super T> rDatatype)
{
return param(getTemporaryParameterType(sName, rDatatype));
}
/***************************************
* Creates a new temporary parameter relation type that is derived from
* another relation type. The other type must be from a different scope
* (i.e. not the same fragment) or else a name conflict will occur. The
* derived relation type will have the original relation in a meta relation
* with the type {@link ProcessRelationTypes#ORIGINAL_RELATION_TYPE}.
*
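	 * <p>A sketch, assuming CUSTOMER_NAME is a string relation type that is
	 * declared in another scope (the OtherFragment class is hypothetical):</p>
	 *
	 * <pre>{@code
	 * Parameter<String> aDerived = paramLike(OtherFragment.CUSTOMER_NAME);
	 * }</pre>
	 *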
* @param rOriginalType The original relation type the new parameter is
* based on
*
* @return A new parameter wrapper for the derived relation type
*/
public <T> Parameter<T> paramLike(RelationType<T> rOriginalType)
{
Parameter<T> rDerivedParam =
param(rOriginalType.getSimpleName(), rOriginalType.getTargetType());
rDerivedParam.type().set(ORIGINAL_RELATION_TYPE, rOriginalType);
return rDerivedParam;
}
/***************************************
* Can be implemented by subclasses to initialize the interaction of this
* fragment. This method will be invoked on every iteration of this
* fragment's interaction, i.e. on the first run and every time after an
* interaction event occurred. The default implementation does nothing.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void prepareInteraction() throws Exception
{
}
/***************************************
* Removes parameters from the lists returned by the methods {@link
* #getInteractionParameters()} and {@link #getInputParameters()}. These
* lists must therefore be mutable!
*
* <p>This implementation replaces the base class implementation because the
* parent method changes the interaction parameters of the process step.</p>
*
* @param rParams The parameters to remove
*/
@Override
public void removeInteractionParameters(Collection<RelationType<?>> rParams)
{
getInteractionParameters().removeAll(rParams);
getInputParameters().removeAll(rParams);
}
/***************************************
* Removes a subordinate fragment that had been added previously by means of
* {@link #addSubFragment(RelationType, InteractionFragment)}.
*
	 * @param rFragmentParam The fragment parameter the sub-fragment had been
	 *                       added to
* @param rSubFragment The sub-fragment instance to remove
*/
public void removeSubFragment(
RelationType<List<RelationType<?>>> rFragmentParam,
InteractionFragment rSubFragment)
{
get(INPUT_PARAMS).removeAll(rSubFragment.getInputParameters());
get(INPUT_PARAMS).remove(rFragmentParam);
getSubFragments().remove(rSubFragment);
deleteParameters(rFragmentParam);
rSubFragment.setProcessStep(null);
rSubFragment.rParent = null;
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#set(RelationType, Object)
*/
@Override
public <T> Relation<T> set(RelationType<T> rType, T rTarget)
{
return rProcessStep.set(rType, rTarget);
}
/***************************************
* Overridden to remember the continuation parameters of this fragment.
*
* @see ProcessFragment#setContinueOnInteraction(boolean, RelationType...)
*/
@Override
public void setContinueOnInteraction(
boolean bContinue,
RelationType<?>... rParams)
{
List<RelationType<?>> aParamList = Arrays.asList(rParams);
if (bContinue)
{
if (aFragmentContinuationParams == null)
{
aFragmentContinuationParams = new ArrayList<RelationType<?>>();
}
aFragmentContinuationParams.addAll(aParamList);
}
else if (aFragmentContinuationParams != null)
{
aFragmentContinuationParams.removeAll(aParamList);
}
super.setContinueOnInteraction(bContinue, rParams);
}
/***************************************
* Sets the interaction handler for a certain parameter.
*
* @see Interaction#setParameterInteractionHandler(RelationType, InteractionHandler)
*/
public void setParameterInteractionHandler(
RelationType<?> rParam,
InteractionHandler rInteractionHandler)
{
getProcessStep().setParameterInteractionHandler(rParam,
rInteractionHandler);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a string datatype.
*
* @see #param(String, Class)
*/
public Parameter<String> textParam(String sName)
{
return param(sName, String.class);
}
/***************************************
* Request a complete update of this fragment's UI by marking all
* interaction parameters including their hierarchy as modified.
*/
public void updateUserInterface()
{
markParameterHierarchyAsModified(getInteractionParameters());
}
/***************************************
* This method can be overridden by subclasses to validate process
* parameters during state changes of the process. The default
	 * implementation returns a new empty map instance that may be modified
* freely by overriding methods to add their own error messages if
* necessary.
*
* @see ProcessStep#validateParameters(boolean)
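	 * <p>A sketch of a typical override; the CUSTOMER_NAME parameter and the
	 * message string are illustrative only:</p>
	 *
	 * <pre>{@code
	 * public Map<RelationType<?>, String> validateParameters(boolean bOnInteraction)
	 * {
	 *     Map<RelationType<?>, String> aErrors =
	 *         super.validateParameters(bOnInteraction);
	 *     if (getParameter(CUSTOMER_NAME) == null)
	 *     {
	 *         aErrors.put(CUSTOMER_NAME, "CustomerNameMissing");
	 *     }
	 *     return aErrors;
	 * }
	 * }</pre>
	 *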
*/
public Map<RelationType<?>, String> validateParameters(
boolean bOnInteraction)
{
return new HashMap<RelationType<?>, String>();
}
/***************************************
* This method will be invoked if the current execution of this fragment is
* aborted and can be overridden by subclasses to perform data resets
* similar to the {@link #rollback()} method.
*/
protected void abort()
{
}
/***************************************
* Can be implemented by subclasses to react on interactions that occurred
* in other fragments. This method will be invoked after {@link
* #handleInteraction(RelationType)}. The default implementation does
* nothing.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
protected void afterInteraction(RelationType<?> rInteractionParam)
throws Exception
{
}
/***************************************
* This method can be overridden by a subclass to indicate whether it
* supports a rollback of the modifications it has performed. The default
* implementation always returns TRUE.
*
	 * @return TRUE if the step implementation supports a rollback
*
* @see #rollback()
* @see ProcessStep#canRollback()
*/
protected boolean canRollback()
{
return true;
}
/***************************************
* Overridden to return a package name that is relative to the current
* fragment instance.
*
* @see ProcessFragment#getTemporaryParameterPackage()
*/
@Override
protected String getTemporaryParameterPackage()
{
return getClass().getSimpleName().toLowerCase() + nFragmentId;
}
/***************************************
* Will be invoked after the process step of this fragment has been set. Can
* be implemented by subclasses to initialize process step-specific
* parameters. The default implementation does nothing.
*
* @param rProcessStep The process step of this fragment
*/
protected void initProcessStep(Interaction rProcessStep)
{
}
/***************************************
	 * Prepares the upload of a file into a process parameter. This requires
	 * two parameters: a string parameter that will be configured to invoke a
	 * file chooser and then holds the name of the selected file (it must be
	 * configured as an input parameter), and a target parameter that will
	 * receive the result of a successful file upload.
*
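	 * <p>A sketch, assuming IMAGE_FILE_NAME is a string input parameter and
	 * IMAGE_DATA a byte array parameter of the invoking fragment (both are
	 * illustrative placeholders):</p>
	 *
	 * <pre>{@code
	 * prepareUpload(IMAGE_FILE_NAME,
	 *               IMAGE_DATA,
	 *               Pattern.compile("image/.*"),
	 *               1024 * 1024);
	 * }</pre>
	 *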
* @param rFileSelectParam The parameter for the file selection
* @param rTargetParam The target parameter for the file content
* @param rContentTypePattern A pattern that limits allowed content types
* or NULL for no restriction
* @param nMaxSize The maximum upload size
*
* @throws Exception If preparing the upload fails
*/
protected void prepareUpload(RelationType<String> rFileSelectParam,
RelationType<byte[]> rTargetParam,
Pattern rContentTypePattern,
int nMaxSize) throws Exception
{
final SessionManager rSessionManager =
getParameter(DataRelationTypes.SESSION_MANAGER);
ProcessParamUploadHandler aUploadHandler =
new ProcessParamUploadHandler(rTargetParam,
rContentTypePattern,
nMaxSize);
String sOldUrl = getUIProperty(URL, rFileSelectParam);
if (sOldUrl != null)
{
rSessionManager.removeUpload(sOldUrl);
getProcessStep().removeFinishAction(sOldUrl);
}
final String sUploadUrl = rSessionManager.prepareUpload(aUploadHandler);
setUIProperty(CONTENT_TYPE, ContentType.FILE_UPLOAD, rFileSelectParam);
setUIProperty(URL, sUploadUrl, rFileSelectParam);
setInteractive(InteractiveInputMode.ACTION, rFileSelectParam);
getProcessStep().addFinishAction(sUploadUrl,
new AbstractAction<ProcessStep>("removeUpload")
{
@Override
public void execute(ProcessStep rValue)
{
rSessionManager.removeUpload(sUploadUrl);
}
});
}
/***************************************
* Can be overridden to perform a rollback of data and parameter
* modifications that have been performed by this fragment. By default all
* fragments are assumed to be capable of being rolled back. The default
* implementation does nothing.
*
* @see ProcessStep#rollback()
*/
protected void rollback() throws Exception
{
}
/***************************************
* Sets the values of process parameters from the attributes of an entity.
* To make this work the given relation types must be entity attribute types
* which will then be set as process parameters. The attributes can either
	 * be direct or extra attributes (which must have the extra attribute flag
* set).
*
* <p>To set modified parameter values back into the entity the method
* {@link #updateEntityFromParameterValues(Entity, List)} can be invoked.
* </p>
*
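	 * <p>A sketch using the varargs variant; rCustomer and the Customer
	 * attribute types are illustrative placeholders:</p>
	 *
	 * <pre>{@code
	 * setParameterValuesFromEntity(rCustomer, Customer.NAME, Customer.EMAIL);
	 * }</pre>
	 *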
* @param rEntity The entity to read the attributes from
* @param rAttributes The entity attributes and process parameters
*
* @throws StorageException If querying an extra attribute fails
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void setParameterValuesFromEntity(
Entity rEntity,
List<RelationType<?>> rAttributes) throws StorageException
{
for (RelationType<?> rParam : rAttributes)
{
if (rEntity.getDefinition().getAttributes().contains(rParam))
{
Object rValue =
rEntity.hasRelation(rParam) ? rEntity.get(rParam)
: rParam.initialValue(rEntity);
setParameter((RelationType) rParam, rValue);
}
else if (rParam.hasFlag(ExtraAttributes.EXTRA_ATTRIBUTE_FLAG))
{
setParameter((RelationType) rParam,
rEntity.getExtraAttribute(rParam, null));
}
}
}
/***************************************
* @see #setParameterValuesFromEntity(Entity, List)
*/
protected void setParameterValuesFromEntity(
Entity rEntity,
RelationType<?>... rParams) throws StorageException
{
setParameterValuesFromEntity(rEntity, Arrays.asList(rParams));
}
/***************************************
* Sets the parent value.
*
* @param rParent The parent value
*/
protected final void setParent(InteractionFragment rParent)
{
this.rParent = rParent;
}
/***************************************
	 * Can be overridden to set up the internal state of a new fragment
	 * instance. Unlike {@link #init()} this method will only be invoked once,
	 * right after an instance has been added to its process step. The default
* implementation does nothing.
*/
protected void setup()
{
}
/***************************************
* Adds a sub-fragment to be displayed as a modal dialog.
*
* @see #showDialog(String, InteractionFragment, ViewDisplayType,
* DialogActionListener, DialogAction...)
*/
protected DialogFragment showDialog(String sParamNameTemplate,
InteractionFragment rContentFragment,
DialogActionListener rDialogListener,
DialogAction... rDialogActions)
throws Exception
{
return showDialog(sParamNameTemplate,
rContentFragment,
true,
rDialogListener,
Arrays.asList(rDialogActions));
}
/***************************************
* Adds a sub-fragment to be displayed as a modal dialog.
*
* @see #showDialog(String, InteractionFragment, boolean, String,
* DialogActionListener, Collection)
*/
protected DialogFragment showDialog(
String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions) throws Exception
{
return showDialog(sParamNameTemplate,
rContentFragment,
bModal,
null,
rDialogListener,
rDialogActions);
}
/***************************************
* Adds a sub-fragment to be displayed as a dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* <p>If the creating code needs to programmatically close the dialog view
* instead of by a button click of the user it can do so by invoking the
* {@link ViewFragment#hide()} method on the returned view fragment instance
* on a corresponding interaction.</p>
*
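	 * <p>A sketch; EditAddressFragment is a hypothetical content fragment and
	 * the question text is only an example:</p>
	 *
	 * <pre>{@code
	 * DialogFragment aDialog =
	 *     showDialog(null,
	 *                new EditAddressFragment(),
	 *                true,
	 *                "Apply the changes?",
	 *                null,
	 *                Arrays.asList(DialogAction.OK));
	 * }</pre>
	 *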
* @param sParamNameTemplate The name template to be used for generated
* dialog parameter names or NULL to derive it
* from the content fragment
* @param rContentFragment The fragment to be displayed as the dialog
* content
* @param bModal TRUE for a modal view
* @param sQuestion A string (typically a question) that will be
* displayed next to the dialog action buttons.
* @param rDialogListener The dialog action listener or NULL for none
* @param rDialogActions The actions to be displayed as the dialog
* buttons
*
* @return The new dialog fragment instance
*
* @throws Exception If displaying the dialog fails
*/
protected DialogFragment showDialog(
String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal,
String sQuestion,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions) throws Exception
{
DialogFragment aDialog =
new DialogFragment(sParamNameTemplate,
rContentFragment,
bModal,
sQuestion,
rDialogActions);
showDialogImpl(aDialog, rDialogListener);
return aDialog;
}
/***************************************
* Displays a message with an error icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showErrorMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_ERROR_ICON,
null,
DialogAction.OK);
}
/***************************************
* Displays a message with an info icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showInfoMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_INFO_ICON,
null,
DialogAction.OK);
}
/***************************************
* Displays a process message in a message box dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showMessageBox(
String sMessage,
String sIcon,
DialogActionListener rDialogListener,
DialogAction... rDialogActions)
{
return showMessageBox(sMessage,
sIcon,
rDialogListener,
Arrays.asList(rDialogActions));
}
/***************************************
* Displays a process message in a message box dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* <p>If one or more extras parameters are given they will be displayed
* between the message and the dialog buttons. Any necessary initialization
* of these parameters including UI properties must be done by the invoking
* code before invoking the message box.</p>
*
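	 * <p>A sketch; the message string stands for an arbitrary text or
	 * resource key:</p>
	 *
	 * <pre>{@code
	 * showMessageBox("MessageDeleteFailed",
	 *                MESSAGE_BOX_ERROR_ICON,
	 *                null,
	 *                Arrays.asList(DialogAction.OK));
	 * }</pre>
	 *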
* @param sMessage The message to be displayed in the message box
* @param sIcon The resource name for an icon or NULL for the
* standard icon.
* @param rDialogListener The dialog action listener or NULL for none
* @param rDialogActions The actions to be displayed as the message box
* buttons
* @param rExtraParams Optional extra parameters to be displayed in the
* message box
*
* @return The view fragment that has been created for the message box
*/
protected MessageBoxFragment showMessageBox(
String sMessage,
String sIcon,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions,
RelationType<?>... rExtraParams)
{
MessageBoxFragment aMessageBox =
new MessageBoxFragment(sMessage,
sIcon,
rDialogActions,
rExtraParams);
try
{
showDialogImpl(aMessageBox, rDialogListener);
}
catch (Exception e)
{
// message boxes should not fail
throw new IllegalStateException(e);
}
return aMessageBox;
}
/***************************************
* Displays a modal dialog with a name prefix that is derived from the name
* of the content fragment.
*
* @see #showDialog(String, InteractionFragment, boolean,
* DialogActionListener, Collection)
*/
protected DialogFragment showModalDialog(
InteractionFragment rContentFragment,
Collection<DialogAction> rDialogActions) throws Exception
{
return showDialog(null, rContentFragment, true, null, rDialogActions);
}
/***************************************
* Adds a sub-fragment to be displayed as a view. The parameter for the view
* fragment will be added automatically to the input parameters of this
* instance. Therefore the parameter lists of this instance MUST be mutable!
*
* <p>Because a view has no explicit buttons like dialogs it must be closed
* by the creating code by invoking the {@link ViewFragment#hide()} method
* on the returned view fragment instance on a corresponding interaction.
* </p>
*
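	 * <p>A sketch; PreviewFragment is a hypothetical content fragment:</p>
	 *
	 * <pre>{@code
	 * ViewFragment aView = showView("Preview", new PreviewFragment(), false);
	 *
	 * // later, from an interaction handler:
	 * aView.hide();
	 * }</pre>
	 *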
* @param sParamNameTemplate The name template to be used for generated
* view parameter names
* @param rContentFragment The fragment to be displayed as the view
* content
* @param bModal TRUE for a modal view
*
	 * @return The new view fragment to provide access to its method {@link
* ViewFragment#hide()}
*
* @throws Exception If displaying the dialog fails
*/
protected ViewFragment showView(String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal) throws Exception
{
ViewFragment aViewFragment =
new ViewFragment(sParamNameTemplate,
rContentFragment,
bModal ? ViewDisplayType.MODAL_VIEW
: ViewDisplayType.VIEW);
aViewFragment.show(this);
return aViewFragment;
}
/***************************************
	 * Displays a message with a warning icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showWarningMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_WARNING_ICON,
null,
DialogAction.OK);
}
/***************************************
* Updates the attributes of an entity from the process parameter values
* that are stored with the given entity attribute relation types. The
* attributes can either be direct or extra attributes.
*
* <p>To set the process parameters from entity attributes the reverse
* method {@link #setParameterValuesFromEntity(Entity, List)} can be used.
* </p>
*
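	 * <p>A sketch writing modified parameter values back to an entity; the
	 * rCustomer entity and its attribute types are illustrative placeholders:
	 * </p>
	 *
	 * <pre>{@code
	 * updateEntityFromParameterValues(rCustomer,
	 *                                 Arrays.asList(Customer.NAME,
	 *                                               Customer.EMAIL));
	 * }</pre>
	 *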
* @param rEntity The entity to update
* @param rParams The process parameter and entity attribute relation types
*
* @throws StorageException if setting an extra attribute fails
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void updateEntityFromParameterValues(
Entity rEntity,
List<RelationType<?>> rParams) throws StorageException
{
for (RelationType<?> rParam : rParams)
{
if (hasParameter(rParam))
{
if (rEntity.getDefinition().getAttributes().contains(rParam))
{
rEntity.set((RelationType) rParam, getParameter(rParam));
}
else if (rParam.hasFlag(ExtraAttributes.EXTRA_ATTRIBUTE_FLAG))
{
rEntity.setExtraAttribute((RelationType) rParam,
getParameter(rParam));
}
}
}
}
/***************************************
* Internal method to abort the current execution of this fragment. It can
* be used to undo data and parameter initializations or interactive
* modifications that have been performed by this fragment. Subclasses must
* implement {@link #abort()} instead.
*
* @throws Exception On errors
*/
final void abortFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.abortFragment();
}
abort();
}
/***************************************
* Internal method that handles the invocation of {@link
* #afterInteraction(RelationType)} for this instance and all registered
* sub-fragments.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void afterFragmentInteraction(RelationType<?> rInteractionParam)
throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.afterFragmentInteraction(rInteractionParam);
}
afterInteraction(rInteractionParam);
}
/***************************************
	 * Internal method to check whether this fragment and all of its
	 * sub-fragments support a rollback of the modifications they have
	 * performed. Subclasses must implement {@link #canRollback()} instead.
	 *
	 * @return TRUE if this fragment and all of its sub-fragments support a
	 *         rollback
*
* @see #rollback()
* @see ProcessStep#canRollback()
*/
final boolean canFragmentRollback()
{
for (InteractionFragment rSubFragment : getSubFragments())
{
if (!rSubFragment.canFragmentRollback())
{
return false;
}
}
return canRollback();
}
/***************************************
* Internal method to finish the fragment execution. Subclasses must
* implement {@link #finish()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void finishFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.finishFragment();
}
finish();
}
/***************************************
	 * Returns the fragment that contains a certain continuation parameter,
	 * which is either this instance or one of its sub-fragments.
	 *
	 * @param rContinuationParam The continuation parameter to check
	 *
	 * @return The fragment containing the given continuation parameter or
	 *         NULL if it belongs to neither this fragment nor a sub-fragment
*/
InteractionFragment getContinuationFragment(
RelationType<?> rContinuationParam)
{
InteractionFragment rContinuationFragment = null;
for (InteractionFragment rSubFragment : getSubFragments())
{
rContinuationFragment =
rSubFragment.getContinuationFragment(rContinuationParam);
if (rContinuationFragment != null)
{
break;
}
}
if (rContinuationFragment == null &&
aFragmentContinuationParams != null &&
aFragmentContinuationParams.contains(rContinuationParam))
{
rContinuationFragment = this;
}
return rContinuationFragment;
}
/***************************************
* Internal method that handles the invocation of {@link
* #handleInteraction(RelationType)} for this instance and all registered
* sub-fragments.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void handleFragmentInteraction(RelationType<?> rInteractionParam)
throws Exception
{
boolean bRootFragmentInteraction = true;
for (InteractionFragment rSubFragment : getSubFragments())
{
if (rSubFragment.hasFragmentInteraction(rInteractionParam))
{
rSubFragment.handleFragmentInteraction(rInteractionParam);
bRootFragmentInteraction = false;
break;
}
}
if (bRootFragmentInteraction || hasInteraction(rInteractionParam))
{
handleInteraction(rInteractionParam);
}
}
/***************************************
* Internal method to check whether an interaction has been caused by an
	 * interaction parameter from this fragment or one of its sub-fragments.
* Subclasses must implement {@link #hasInteraction(RelationType)} instead.
*
* @param rInteractionParam The interaction parameter to check
*
* @return TRUE if the interaction was caused by a parameter of this
* fragment
*/
final boolean hasFragmentInteraction(RelationType<?> rInteractionParam)
{
for (InteractionFragment rSubFragment : getSubFragments())
{
if (rSubFragment.hasFragmentInteraction(rInteractionParam))
{
return true;
}
}
return hasInteraction(rInteractionParam);
}
/***************************************
* Internal method to initialize this fragment. Subclasses must implement
* {@link #init()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void initFragment() throws Exception
{
getSubFragments().clear();
init();
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.initFragment();
}
markFragmentInputParams();
}
/***************************************
* Internal method to prepare each interaction of this fragment. Subclasses
* must implement {@link #prepareInteraction()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void prepareFragmentInteraction() throws Exception
{
prepareInteraction();
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.prepareFragmentInteraction();
}
}
/***************************************
* Internal method to perform a rollback of data and parameter modifications
* that have been performed by this fragment. Subclasses must implement
* {@link #rollback()} instead.
*
* @throws Exception On errors
*/
final void rollbackFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.rollbackFragment();
}
rollback();
}
/***************************************
* Sets the parameter this fragment is displayed in.
*
* @param rFragmentParam The fragment parameter
*/
final void setFragmentParam(
RelationType<List<RelationType<?>>> rFragmentParam)
{
this.rFragmentParam = rFragmentParam;
}
/***************************************
* Package-internal method to associate this fragment with a particular
* interactive process step.
*
* @param rProcessStep The process step this fragment belongs to
*/
final void setProcessStep(Interaction rProcessStep)
{
this.rProcessStep = rProcessStep;
initProcessStep(rProcessStep);
}
/***************************************
* Internal method to validate the fragment's process parameters during
* state changes of the process. Subclasses must implement {@link
* #validateParameters(boolean)} instead.
*
* @see ProcessStep#validateParameters(boolean)
*/
Map<RelationType<?>, String> validateFragmentParameters(
boolean bOnInteraction)
{
HashMap<RelationType<?>, String> rErrorParams =
new HashMap<RelationType<?>, String>();
for (InteractionFragment rSubFragment : getSubFragments())
{
rErrorParams.putAll(rSubFragment.validateFragmentParameters(bOnInteraction));
}
rErrorParams.putAll(validateParameters(bOnInteraction));
return rErrorParams;
}
/***************************************
* Internal method that displays any kind of dialog fragment.
*
* @param rDialogFragment The dialog fragment
* @param rDialogListener An optional dialog listener or NULL for none
*
* @throws Exception If displaying the dialog fails
*/
private void showDialogImpl(
DialogFragment rDialogFragment,
DialogActionListener rDialogListener) throws Exception
{
if (rDialogListener != null)
{
rDialogFragment.addDialogActionListener(rDialogListener);
}
rDialogFragment.show(this);
}
//~ Inner Classes ----------------------------------------------------------
/********************************************************************
* An implementation of the {@link UploadHandler} interface that writes
* uploaded data into a process parameter. This class is used internally by
	 * {@link InteractionFragment#prepareUpload(RelationType, RelationType,
	 * Pattern, int)}.
*
* @author eso
*/
class ProcessParamUploadHandler implements UploadHandler
{
//~ Instance fields ----------------------------------------------------
private RelationType<byte[]> rTargetParam;
private Pattern rContentTypePattern;
private int nMaxSize;
//~ Constructors -------------------------------------------------------
/***************************************
* Creates a new instance.
*
* @param rTargetParam The target parameter for the uploaded data
* @param rContentTypePattern A pattern that limits allowed content
* types or NULL for no restriction
* @param nMaxSize The maximum upload size
*/
public ProcessParamUploadHandler(
RelationType<byte[]> rTargetParam,
Pattern rContentTypePattern,
int nMaxSize)
{
this.rTargetParam = rTargetParam;
this.rContentTypePattern = rContentTypePattern;
this.nMaxSize = nMaxSize;
}
//~ Methods ------------------------------------------------------------
/***************************************
* {@inheritDoc}
*/
@Override
public void processUploadData(String sFilename,
String sContentType,
InputStream rDataStream) throws Exception
{
byte[] aBuf = new byte[1024 * 16];
int nRead;
if (rContentTypePattern != null &&
!rContentTypePattern.matcher(sContentType).matches())
{
error("InvalidUploadContentType");
}
ByteArrayOutputStream aOutStream = new ByteArrayOutputStream();
while ((nRead = rDataStream.read(aBuf, 0, aBuf.length)) != -1)
{
if (aOutStream.size() + nRead <= nMaxSize)
{
aOutStream.write(aBuf, 0, nRead);
}
else
{
error("UploadSizeLimitExceeded");
}
}
setParameter(rTargetParam, aOutStream.toByteArray());
removeParameterAnnotation(rTargetParam, ERROR_MESSAGE);
}
/***************************************
* Sets an error message on the target parameter and then throws an
* exception.
*
* @param sMessage The error message
*
* @throws ProcessException The error exception
*/
private void error(String sMessage) throws ProcessException
{
annotateParameter(rTargetParam, null, ERROR_MESSAGE, sMessage);
throw new ProcessException(getProcessStep(), sMessage);
}
}
}
| src/main/java/de/esoco/process/step/InteractionFragment.java | //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
// This file is a part of the 'esoco-business' project.
// Copyright 2016 Elmar Sonnenschein, esoco GmbH, Flensburg, Germany
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
package de.esoco.process.step;
import de.esoco.data.DataRelationTypes;
import de.esoco.data.SessionManager;
import de.esoco.data.UploadHandler;
import de.esoco.data.element.DataElementList.ViewDisplayType;
import de.esoco.data.element.SelectionDataElement;
import de.esoco.entity.Entity;
import de.esoco.entity.EntityRelationTypes.HierarchicalQueryMode;
import de.esoco.entity.ExtraAttributes;
import de.esoco.lib.collection.CollectionUtil;
import de.esoco.lib.expression.Predicate;
import de.esoco.lib.expression.function.AbstractAction;
import de.esoco.lib.property.Updatable;
import de.esoco.lib.property.UserInterfaceProperties;
import de.esoco.lib.property.UserInterfaceProperties.ContentType;
import de.esoco.lib.property.UserInterfaceProperties.InteractiveInputMode;
import de.esoco.process.Parameter;
import de.esoco.process.ParameterList;
import de.esoco.process.Process;
import de.esoco.process.ProcessElement;
import de.esoco.process.ProcessException;
import de.esoco.process.ProcessFragment;
import de.esoco.process.ProcessRelationTypes;
import de.esoco.process.ProcessStep;
import de.esoco.process.step.DialogFragment.DialogAction;
import de.esoco.process.step.DialogFragment.DialogActionListener;
import de.esoco.process.step.Interaction.InteractionHandler;
import de.esoco.storage.QueryPredicate;
import de.esoco.storage.StorageException;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.obrel.core.Relation;
import org.obrel.core.RelationType;
import org.obrel.core.RelationTypes;
import org.obrel.type.MetaTypes;
import static de.esoco.entity.EntityPredicates.forEntity;
import static de.esoco.entity.EntityRelationTypes.HIERARCHICAL_QUERY_MODE;
import static de.esoco.lib.property.UserInterfaceProperties.CONTENT_TYPE;
import static de.esoco.lib.property.UserInterfaceProperties.CURRENT_SELECTION;
import static de.esoco.lib.property.UserInterfaceProperties.DISABLED;
import static de.esoco.lib.property.UserInterfaceProperties.URL;
import static de.esoco.process.ProcessRelationTypes.INPUT_PARAMS;
import static de.esoco.process.ProcessRelationTypes.ORIGINAL_RELATION_TYPE;
import static de.esoco.process.ProcessRelationTypes.PARAM_UPDATE_LISTENERS;
import static org.obrel.type.StandardTypes.ERROR_MESSAGE;
/********************************************************************
* A process element subclass that serves as a fragment of an interactive
	 * process step. This allows the user interface of complex interactions to
	 * be split into different parts that can be re-used more easily.
*
* @author eso
*/
public abstract class InteractionFragment extends ProcessFragment
{
//~ Static fields/initializers ---------------------------------------------
private static final long serialVersionUID = 1L;
/** The resource string for an error message box icon. */
public static final String MESSAGE_BOX_ERROR_ICON = "#imErrorMessage";
/** The resource string for a warning message box icon. */
public static final String MESSAGE_BOX_WARNING_ICON = "#imWarningMessage";
/** The resource string for a question message box icon. */
public static final String MESSAGE_BOX_QUESTION_ICON = "#imQuestionMessage";
/** The resource string for an info message box icon. */
public static final String MESSAGE_BOX_INFO_ICON = "#imInfoMessage";
private static int nNextFragmentId = 0;
//~ Instance fields --------------------------------------------------------
private int nFragmentId = nNextFragmentId++;
private Interaction rProcessStep;
private InteractionFragment rParent;
private RelationType<List<RelationType<?>>> rFragmentParam;
private List<RelationType<?>> aFragmentContinuationParams = null;
private List<RelationType<?>> aInteractionParams = new ArrayList<>();
private Set<RelationType<?>> aInputParams = new HashSet<>();
//~ Constructors -----------------------------------------------------------
/***************************************
* Creates a new instance.
*/
public InteractionFragment()
{
RelationTypes.init(getClass());
}
//~ Methods ----------------------------------------------------------------
/***************************************
* Must be implemented to initialize the interaction parameters of this
* fragment.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public abstract void init() throws Exception;
/***************************************
	 * Overridden to add parameters that are not already contained in the list of interaction
* parameters of this instance as returned by the method {@link
* #getInteractionParameters()}. The returned collection must therefore be
* mutable (as is the case with the default parameter collection).
*
* <p>This implementation replaces the base class implementation which
* changes the interaction parameters of the process step instead of the
* fragment.</p>
*
* @param rParams The interaction parameters to add
*/
@Override
public void addDisplayParameters(
Collection<? extends RelationType<?>> rParams)
{
List<RelationType<?>> rInteractionParams = getInteractionParameters();
for (RelationType<?> rParam : rParams)
{
// do not add parameters that are displayed in panels because they
// are stored in the parameter list of the panel parameter
if (!isPanelParameter(rParam) &&
!rInteractionParams.contains(rParam))
{
rInteractionParams.add(rParam);
}
}
}
/***************************************
* @see #addInputParameters(Collection)
*/
@Override
public void addInputParameters(RelationType<?>... rParams)
{
addInputParameters(Arrays.asList(rParams));
}
/***************************************
* Adds the given parameters to the interaction and input parameters of this
* instance. The input parameters are queried with the method {@link
* #getInputParameters()}, the interaction parameters are updated with
* {@link #addDisplayParameters(Collection)}.
*
* <p>This implementation replaces the base class implementation which
* changes the interaction parameters of the process step instead of the
* fragment.</p>
*
* @param rParams The input parameters to add
*
* @see #addDisplayParameters(Collection)
*/
@Override
public void addInputParameters(
Collection<? extends RelationType<?>> rParams)
{
addDisplayParameters(rParams);
markInputParams(true, rParams);
}
/***************************************
* Convenience method to add a listener to the process step relation with
* the type {@link ProcessRelationTypes#PARAM_UPDATE_LISTENERS}. To remove a
* listener it should be removed from the parameter set directly.
*
* @param rListener The listener to add
*/
public void addParameterUpdateListener(Updatable rListener)
{
get(PARAM_UPDATE_LISTENERS).add(rListener);
}
/***************************************
* A variant of {@link #addSubFragment(String, InteractionFragment)} that
* uses the name of the fragment class for the temporary fragment parameter.
*
* @see #addSubFragment(String, InteractionFragment)
*/
public Parameter<List<RelationType<?>>> addSubFragment(
InteractionFragment rSubFragment)
{
return addSubFragment(rSubFragment.getClass().getSimpleName(),
rSubFragment);
}
/***************************************
* Adds a subordinate fragment to this instance into a temporary parameter
* and directly displays it.
*
* @see #addSubFragment(String, InteractionFragment, boolean)
*/
public Parameter<List<RelationType<?>>> addSubFragment(
String sName,
InteractionFragment rSubFragment)
{
Parameter<List<RelationType<?>>> rSubFragmentParam =
listParam(sName, RelationType.class);
addSubFragment(rSubFragmentParam.type(), rSubFragment);
return addSubFragment(sName, rSubFragment, true);
}
/***************************************
* Overridden to set the parent of the sub-fragment to this instance.
*
* @see ProcessFragment#addSubFragment(RelationType, InteractionFragment)
*/
@Override
public void addSubFragment(
RelationType<List<RelationType<?>>> rFragmentParam,
InteractionFragment rSubFragment)
{
rSubFragment.rParent = this;
super.addSubFragment(rFragmentParam, rSubFragment);
}
/***************************************
* Adds a subordinate fragment to this instance into a temporary parameter
* and optionally displays it. The temporary parameter relation type will be
* created with the given name by invoking {@link #listParam(String, Class)}
* and the parameter wrapper will be returned. The fragment will be added by
* invoking {@link #addSubFragment(RelationType, InteractionFragment)}.
* Furthermore the UI property {@link UserInterfaceProperties#HIDE_LABEL}
* will be set on the new fragment parameter because fragments are typically
* displayed without a label.
*
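	 * <p>A sketch; AddressFragment is a hypothetical sub-fragment class:</p>
	 *
	 * <pre>{@code
	 * Parameter<List<RelationType<?>>> aAddressPanel =
	 *     addSubFragment("AddressPanel", new AddressFragment(), true);
	 * }</pre>
	 *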
* @param sName The name of the temporary fragment parameter
* @param rSubFragment The fragment to add
* @param bDisplay TRUE to invoke {@link Parameter#display()} on the
* new fragment parameter
*
* @return The wrapper for the fragment parameter
*/
public Parameter<List<RelationType<?>>> addSubFragment(
String sName,
InteractionFragment rSubFragment,
boolean bDisplay)
{
Parameter<List<RelationType<?>>> rSubFragmentParam =
listParam(sName, RelationType.class);
addSubFragment(rSubFragmentParam.type(), rSubFragment);
if (bDisplay)
{
rSubFragmentParam.display();
}
return rSubFragmentParam.hideLabel();
}
/***************************************
* Internal method that will be invoked to attach this fragment to the given
* process step and fragment parameter.
*
* @param rProcessStep The process step to attach this instance to
* @param rFragmentParam The parameter this fragment will be stored in
*/
public void attach(
Interaction rProcessStep,
RelationType<List<RelationType<?>>> rFragmentParam)
{
this.rFragmentParam = rFragmentParam;
setProcessStep(rProcessStep);
setup();
}
/***************************************
* Can be overridden by subclasses to perform resource cleanups when the
* process ends. The default implementation does nothing.
*
* @see ProcessStep#cleanup()
*/
public void cleanup()
{
}
/***************************************
* Clears lists returned by the methods {@link #getInteractionParameters()}
* and {@link #getInputParameters()}. These lists must therefore be mutable!
*
* <p>This implementation replaces the base class implementation because the
* parent method changes the interaction parameters of the process step.</p>
*/
@Override
public void clearInteractionParameters()
{
getInteractionParameters().clear();
getInputParameters().clear();
}
/***************************************
	 * Clears the selection of a certain parameter by setting its value to NULL
* and the property {@link UserInterfaceProperties#CURRENT_SELECTION} to -1.
*
* @param rParam The parameter to clear the selection of
*/
public void clearSelection(RelationType<?> rParam)
{
Object rParamValue = getParameter(rParam);
boolean bClearSelection = (rParamValue != null);
if (SelectionDataElement.class.isAssignableFrom(rParam.getTargetType()))
{
SelectionDataElement rElement = (SelectionDataElement) rParamValue;
if (!SelectionDataElement.NO_SELECTION.equals(rElement.getValue()))
{
rElement.setValue(SelectionDataElement.NO_SELECTION);
bClearSelection = true;
}
}
else
{
setParameter(rParam, null);
}
// only clear selection if one exists to prevent unnecessary updates
if (bClearSelection)
{
setUIProperty(-1, CURRENT_SELECTION, rParam);
}
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a {@link Date} datatype.
*
* @see #param(String, Class)
*/
public Parameter<Date> dateParam(String sName)
{
return param(sName, Date.class);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#deleteRelation(Relation)
*/
@Override
public void deleteRelation(Relation<?> rRelation)
{
rProcessStep.deleteRelation(rRelation);
}
/***************************************
	 * Enables or disables the editing of this fragment and of all its
* children. This is achieved by clearing or setting the flag property
* {@link UserInterfaceProperties#DISABLED} on the fragment input
* parameters. Subclasses may override this method to implement a more
* specific handling but should normally also call the superclass
* implementation.
*
* @param bEnable TRUE to enable editing, FALSE to disable
*/
public void enableEdit(boolean bEnable)
{
if (bEnable)
{
clearUIFlag(DISABLED, getInputParameters());
}
else
{
setUIFlag(DISABLED, getInputParameters());
}
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.enableEdit(bEnable);
}
}
/***************************************
* Can be overridden by a fragment to execute actions when the process flow
* leaves this fragment.
*
* <p>The default implementation does nothing.</p>
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void finish() throws Exception
{
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a boolean datatype.
*
* @see #param(String, Class)
*/
public Parameter<Boolean> flagParam(String sName)
{
return param(sName, Boolean.class);
}
/***************************************
* Creates a new parameter wrapper for the relation type this fragment is
* stored in.
*
* @return the parameter wrapper for the fragment parameter
*/
public ParameterList fragmentParam()
{
return new ParameterList(rParent != null ? rParent : this,
getFragmentParameter(),
false);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#get(RelationType)
*/
@Override
public <T> T get(RelationType<T> rType)
{
return rProcessStep.get(rType);
}
/***************************************
* Returns the parameter this fragment is displayed in.
*
* @return The fragment parameter
*/
public final RelationType<List<RelationType<?>>> getFragmentParameter()
{
return rFragmentParam;
}
/***************************************
* Returns the collection of input parameters of this fragment. These must
* be a subset of {@link #getInteractionParameters()}. The default
	 * implementation returns a mutable collection that can be modified
	 * directly by a subclass, or it can be overridden by subclasses to return
* their own input parameter collection.
*
	 * @return The collection of this fragment's input parameters
*/
public Collection<RelationType<?>> getInputParameters()
{
return aInputParams;
}
/***************************************
* Returns the list of interaction parameters for this fragment. The default
	 * implementation returns a mutable list that can be modified directly by
	 * a subclass, or it can be overridden by subclasses to return their own
* interaction parameter list.
*
* @return The list of this fragment's interaction parameters
*/
public List<RelationType<?>> getInteractionParameters()
{
return aInteractionParams;
}
/***************************************
	 * Returns the interaction handler for a certain parameter of the
	 * enclosing process step.
*
* @see Interaction#getParameterInteractionHandler(RelationType)
*/
public InteractionHandler getParameterInteractionHandler(
RelationType<?> rParam)
{
return getProcessStep().getParameterInteractionHandler(rParam);
}
/***************************************
* Returns the parent fragment of this instance.
*
* @return The parent fragment or NULL for a root fragment
*/
public final InteractionFragment getParent()
{
return rParent;
}
/***************************************
* @see ProcessFragment#getProcess()
*/
@Override
public Process getProcess()
{
return rProcessStep.getProcess();
}
/***************************************
* Returns the interactive process step this element is associated with.
*
* @return The process step this fragment belongs to
*/
@Override
public final Interaction getProcessStep()
{
return rProcessStep;
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#getRelation(RelationType)
*/
@Override
public <T> Relation<T> getRelation(RelationType<T> rType)
{
return rProcessStep.getRelation(rType);
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#getRelations(Predicate)
*/
@Override
public List<Relation<?>> getRelations(
Predicate<? super Relation<?>> rFilter)
{
return rProcessStep.getRelations(rFilter);
}
/***************************************
	 * Can be overridden by subclasses to handle interactions for this
	 * fragment. The default implementation does nothing.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void handleInteraction(RelationType<?> rInteractionParam)
throws Exception
{
}
/***************************************
* Checks whether an interaction has been caused by an interaction parameter
* from this fragment. The default implementation checks if the given
	 * parameter is one of this fragment's input parameters.
*
* @param rInteractionParam The interaction parameter to check
*
* @return TRUE if the interaction was caused by a parameter of this
* fragment
*/
public boolean hasInteraction(RelationType<?> rInteractionParam)
{
return getInputParameters().contains(rInteractionParam);
}
/***************************************
* Initializes a parameter for the display of a storage query.
*
* @param rParam The parameter to initialize the query for
* @param rEntityClass The entity class to query
	 * @param pCriteria    The query criteria or NULL for none
	 * @param pSortOrder   The sort predicate or NULL for the default order
	 * @param eMode        The hierarchical query mode; a hierarchical query
	 *                     will be displayed as a tree-table
* @param rColumns The columns to display
*
* @return The generated query predicate
*/
public <E extends Entity> QueryPredicate<E> initQueryParameter(
RelationType<E> rParam,
Class<E> rEntityClass,
Predicate<? super E> pCriteria,
Predicate<? super Entity> pSortOrder,
HierarchicalQueryMode eMode,
RelationType<?>... rColumns)
{
QueryPredicate<E> qEntities = forEntity(rEntityClass, pCriteria);
qEntities.set(HIERARCHICAL_QUERY_MODE, eMode);
annotateForEntityQuery(rParam, qEntities, pSortOrder, rColumns);
return qEntities;
}
/***************************************
* @see #insertInputParameters(RelationType, RelationType...)
*/
public void insertInputParameters(
RelationType<?> rBeforeParam,
RelationType<?>... rParams)
{
insertInputParameters(rBeforeParam, Arrays.asList(rParams));
}
/***************************************
* Inserts additional parameters into the lists returned by the methods
* {@link #getInteractionParameters()} and {@link #getInputParameters()}.
* These lists must therefore be mutable!
*
* @param rBeforeParam The parameter to insert the other parameters before
* @param rParams The parameters to add
*/
public void insertInputParameters(
RelationType<?> rBeforeParam,
Collection<RelationType<?>> rParams)
{
CollectionUtil.insert(getInteractionParameters(),
rBeforeParam,
rParams);
getInputParameters().addAll(rParams);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* an integer datatype.
*
* @see #param(String, Class)
*/
public Parameter<Integer> intParam(String sName)
{
return param(sName, Integer.class);
}
/***************************************
	 * Creates a new parameter wrapper for this fragment with a temporary
	 * list relation type.
	 *
	 * @param sName        The name of the relation type
	 * @param rElementType The datatype of the list elements
*
* @return the parameter instance
*/
public <T> Parameter<List<T>> listParam(
String sName,
Class<? super T> rElementType)
{
return param(getTemporaryListType(sName, rElementType));
}
/***************************************
	 * Marks the input parameters of this fragment and all of its
* sub-fragments.
*/
public void markFragmentInputParams()
{
get(INPUT_PARAMS).addAll(getInputParameters());
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.markFragmentInputParams();
}
}
/***************************************
* Overridden to operate on the fragment input parameters.
*
* @see ProcessElement#markInputParams(boolean, Collection)
*/
@Override
public void markInputParams(
boolean bInput,
Collection<? extends RelationType<?>> rParams)
{
Collection<RelationType<?>> rInputParams = getInputParameters();
for (RelationType<?> rParam : rParams)
{
boolean bHasParam = rInputParams.contains(rParam);
if (!bHasParam && bInput)
{
rInputParams.add(rParam);
}
else if (bHasParam && !bInput)
{
rInputParams.remove(rParam);
}
}
super.markInputParams(bInput, rParams);
}
/***************************************
* Marks a hierarchy of parameters as modified.
*
* @param rParams The list of root parameters
*/
public void markParameterHierarchyAsModified(
Collection<RelationType<?>> rParams)
{
for (RelationType<?> rParam : rParams)
{
markParameterAsModified(rParam);
if (Collection.class.isAssignableFrom(rParam.getTargetType()))
{
if (rParam.get(MetaTypes.ELEMENT_DATATYPE) ==
RelationType.class)
{
@SuppressWarnings("unchecked")
Collection<RelationType<?>> rChildParams =
(Collection<RelationType<?>>) getParameter(rParam);
if (rChildParams != null)
{
markParameterHierarchyAsModified(rChildParams);
}
}
}
}
}
/***************************************
* Notifies all listeners for parameter updates that are registered in the
* relation {@link ProcessRelationTypes#PARAM_UPDATE_LISTENERS} of this
* fragment's process step. Because relations are shared between fragments
* this will affect all fragments in the current interaction.
*/
public void notifyParameterUpdateListeners()
{
if (hasRelation(PARAM_UPDATE_LISTENERS))
{
for (Updatable rListener : get(PARAM_UPDATE_LISTENERS))
{
rListener.update();
}
}
}
/***************************************
* Creates a new temporary relation type for a list of relation types and
* returns a parameter wrapper for it.
*
* @param sName The name of the parameter list
*
* @return the parameter wrapper for the parameter list
*/
public ParameterList panel(String sName)
{
RelationType<List<RelationType<?>>> rListType =
getTemporaryListType(sName, RelationType.class);
return new ParameterList(this, rListType, true);
}
/***************************************
* Creates a new parameter wrapper for the given relation type in this
* fragment.
*
* @param rParam The parameter to wrap
*
* @return the parameter wrapper
*/
public <T> Parameter<T> param(RelationType<T> rParam)
{
return new Parameter<>(this, rParam);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* an enum datatype. The parameter will be named with the simple name of the
* enum class.
*
* @see #param(String, Class)
*/
public <E extends Enum<E>> Parameter<E> param(Class<E> rEnumClass)
{
return param(rEnumClass.getSimpleName(), rEnumClass);
}
/***************************************
* Create a new parameter wrapper for this fragment with a temporary
* relation type. If no matching temporary relation type exists already it
* will be created.
*
* @param sName The name of the relation type
* @param rDatatype The parameter datatype
*
* @return the parameter wrapper
*/
public <T> Parameter<T> param(String sName, Class<? super T> rDatatype)
{
return param(getTemporaryParameterType(sName, rDatatype));
}
/***************************************
* Creates a new temporary parameter relation type that is derived from
* another relation type. The other type must be from a different scope
* (i.e. not the same fragment) or else a name conflict will occur. The
* derived relation type will have the original relation in a meta relation
* with the type {@link ProcessRelationTypes#ORIGINAL_RELATION_TYPE}.
*
* @param rOriginalType The original relation type the new parameter is
* based on
*
* @return A new parameter wrapper for the derived relation type
*/
public <T> Parameter<T> paramLike(RelationType<T> rOriginalType)
{
Parameter<T> rDerivedParam =
param(rOriginalType.getSimpleName(), rOriginalType.getTargetType());
rDerivedParam.type().set(ORIGINAL_RELATION_TYPE, rOriginalType);
return rDerivedParam;
}
/***************************************
* Can be implemented by subclasses to initialize the interaction of this
* fragment. This method will be invoked on every iteration of this
* fragment's interaction, i.e. on the first run and every time after an
* interaction event occurred. The default implementation does nothing.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
public void prepareInteraction() throws Exception
{
}
/***************************************
* Removes parameters from the lists returned by the methods {@link
* #getInteractionParameters()} and {@link #getInputParameters()}. These
* lists must therefore be mutable!
*
* <p>This implementation replaces the base class implementation because the
* parent method changes the interaction parameters of the process step.</p>
*
* @param rParams The parameters to remove
*/
@Override
public void removeInteractionParameters(Collection<RelationType<?>> rParams)
{
getInteractionParameters().removeAll(rParams);
getInputParameters().removeAll(rParams);
}
/***************************************
* Removes a subordinate fragment that had been added previously by means of
* {@link #addSubFragment(RelationType, InteractionFragment)}.
*
* @param rFragmentParam The target parameter of the fragment parameters
* @param rSubFragment The sub-fragment instance to remove
*/
public void removeSubFragment(
RelationType<List<RelationType<?>>> rFragmentParam,
InteractionFragment rSubFragment)
{
get(INPUT_PARAMS).removeAll(rSubFragment.getInputParameters());
get(INPUT_PARAMS).remove(rFragmentParam);
getInteractionParameters().remove(rFragmentParam);
getSubFragments().remove(rSubFragment);
deleteParameters(rFragmentParam);
rSubFragment.setProcessStep(null);
rSubFragment.rParent = null;
}
/***************************************
* Overridden to forward the call to the enclosing process step.
*
* @see ProcessStep#set(RelationType, Object)
*/
@Override
public <T> Relation<T> set(RelationType<T> rType, T rTarget)
{
return rProcessStep.set(rType, rTarget);
}
/***************************************
* Overridden to remember the continuation parameters of this fragment.
*
* @see ProcessFragment#setContinueOnInteraction(boolean, RelationType...)
*/
@Override
public void setContinueOnInteraction(
boolean bContinue,
RelationType<?>... rParams)
{
List<RelationType<?>> aParamList = Arrays.asList(rParams);
if (bContinue)
{
if (aFragmentContinuationParams == null)
{
aFragmentContinuationParams = new ArrayList<RelationType<?>>();
}
aFragmentContinuationParams.addAll(aParamList);
}
else if (aFragmentContinuationParams != null)
{
aFragmentContinuationParams.removeAll(aParamList);
}
super.setContinueOnInteraction(bContinue, rParams);
}
/***************************************
* Sets the interaction handler for a certain parameter.
*
* @see Interaction#setParameterInteractionHandler(RelationType, InteractionHandler)
*/
public void setParameterInteractionHandler(
RelationType<?> rParam,
InteractionHandler rInteractionHandler)
{
getProcessStep().setParameterInteractionHandler(rParam,
rInteractionHandler);
}
/***************************************
* Convenience method to create a new temporary parameter relation type with
* a string datatype.
*
* @see #param(String, Class)
*/
public Parameter<String> textParam(String sName)
{
return param(sName, String.class);
}
/***************************************
* Request a complete update of this fragment's UI by marking all
* interaction parameters including their hierarchy as modified.
*/
public void updateUserInterface()
{
markParameterHierarchyAsModified(getInteractionParameters());
}
/***************************************
* This method can be overridden by subclasses to validate process
* parameters during state changes of the process. The default
* implementation returns an new empty map instance that may be modified
* freely by overriding methods to add their own error messages if
* necessary.
*
* @see ProcessStep#validateParameters(boolean)
*/
public Map<RelationType<?>, String> validateParameters(
boolean bOnInteraction)
{
return new HashMap<RelationType<?>, String>();
}
/***************************************
* This method will be invoked if the current execution of this fragment is
* aborted and can be overridden by subclasses to perform data resets
* similar to the {@link #rollback()} method.
*/
protected void abort()
{
}
/***************************************
* Can be implemented by subclasses to react on interactions that occurred
* in other fragments. This method will be invoked after {@link
* #handleInteraction(RelationType)}. The default implementation does
* nothing.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
protected void afterInteraction(RelationType<?> rInteractionParam)
throws Exception
{
}
/***************************************
* This method can be overridden by a subclass to indicate whether it
* supports a rollback of the modifications it has performed. The default
* implementation always returns TRUE.
*
* @return TRUE if the step implementation support a rollback
*
* @see #rollback()
* @see ProcessStep#canRollback()
*/
protected boolean canRollback()
{
return true;
}
/***************************************
* Overridden to return a package name that is relative to the current
* fragment instance.
*
* @see ProcessFragment#getTemporaryParameterPackage()
*/
@Override
protected String getTemporaryParameterPackage()
{
return getClass().getSimpleName().toLowerCase() + nFragmentId;
}
/***************************************
* Will be invoked after the process step of this fragment has been set. Can
* be implemented by subclasses to initialize process step-specific
* parameters. The default implementation does nothing.
*
* @param rProcessStep The process step of this fragment
*/
protected void initProcessStep(Interaction rProcessStep)
{
}
/***************************************
* Prepares the upload of a file into a process parameter. This requires two
* parameters. One string parameter that will be configured to invoke a file
* chooser and then holds the name of the selected file. This parameter must
* be configured as an input parameter. And a target parameter that will
* receive the result of a successful file upload.
*
* @param rFileSelectParam The parameter for the file selection
* @param rTargetParam The target parameter for the file content
* @param rContentTypePattern A pattern that limits allowed content types
* or NULL for no restriction
* @param nMaxSize The maximum upload size
*
* @throws Exception If preparing the upload fails
*/
protected void prepareUpload(RelationType<String> rFileSelectParam,
RelationType<byte[]> rTargetParam,
Pattern rContentTypePattern,
int nMaxSize) throws Exception
{
final SessionManager rSessionManager =
getParameter(DataRelationTypes.SESSION_MANAGER);
ProcessParamUploadHandler aUploadHandler =
new ProcessParamUploadHandler(rTargetParam,
rContentTypePattern,
nMaxSize);
String sOldUrl = getUIProperty(URL, rFileSelectParam);
if (sOldUrl != null)
{
rSessionManager.removeUpload(sOldUrl);
getProcessStep().removeFinishAction(sOldUrl);
}
final String sUploadUrl = rSessionManager.prepareUpload(aUploadHandler);
setUIProperty(CONTENT_TYPE, ContentType.FILE_UPLOAD, rFileSelectParam);
setUIProperty(URL, sUploadUrl, rFileSelectParam);
setInteractive(InteractiveInputMode.ACTION, rFileSelectParam);
getProcessStep().addFinishAction(sUploadUrl,
new AbstractAction<ProcessStep>("removeUpload")
{
@Override
public void execute(ProcessStep rValue)
{
rSessionManager.removeUpload(sUploadUrl);
}
});
}
/***************************************
* Can be overridden to perform a rollback of data and parameter
* modifications that have been performed by this fragment. By default all
* fragments are assumed to be capable of being rolled back. The default
* implementation does nothing.
*
* @see ProcessStep#rollback()
*/
protected void rollback() throws Exception
{
}
/***************************************
* Sets the values of process parameters from the attributes of an entity.
* To make this work the given relation types must be entity attribute types
* which will then be set as process parameters. The attributes can either
* by direct or extra attributes (which must have the extra attribute flag
* set).
*
* <p>To set modified parameter values back into the entity the method
* {@link #updateEntityFromParameterValues(Entity, List)} can be invoked.
* </p>
*
* @param rEntity The entity to read the attributes from
* @param rAttributes The entity attributes and process parameters
*
* @throws StorageException If querying an extra attribute fails
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void setParameterValuesFromEntity(
Entity rEntity,
List<RelationType<?>> rAttributes) throws StorageException
{
for (RelationType<?> rParam : rAttributes)
{
if (rEntity.getDefinition().getAttributes().contains(rParam))
{
Object rValue =
rEntity.hasRelation(rParam) ? rEntity.get(rParam)
: rParam.initialValue(rEntity);
setParameter((RelationType) rParam, rValue);
}
else if (rParam.hasFlag(ExtraAttributes.EXTRA_ATTRIBUTE_FLAG))
{
setParameter((RelationType) rParam,
rEntity.getExtraAttribute(rParam, null));
}
}
}
/***************************************
* @see #setParameterValuesFromEntity(Entity, List)
*/
protected void setParameterValuesFromEntity(
Entity rEntity,
RelationType<?>... rParams) throws StorageException
{
setParameterValuesFromEntity(rEntity, Arrays.asList(rParams));
}
/***************************************
* Sets the parent value.
*
* @param rParent The parent value
*/
protected final void setParent(InteractionFragment rParent)
{
this.rParent = rParent;
}
/***************************************
* Can be overridden to setup the internal state of a new fragment instance.
* Other than {@link #init()} this method will only be invoked once, right
* after an instance has been added to it's process step. The default
* implementation does nothing.
*/
protected void setup()
{
}
/***************************************
* Adds a sub-fragment to be displayed as a modal dialog.
*
* @see #showDialog(String, InteractionFragment, ViewDisplayType,
* DialogActionListener, DialogAction...)
*/
protected DialogFragment showDialog(String sParamNameTemplate,
InteractionFragment rContentFragment,
DialogActionListener rDialogListener,
DialogAction... rDialogActions)
throws Exception
{
return showDialog(sParamNameTemplate,
rContentFragment,
true,
rDialogListener,
Arrays.asList(rDialogActions));
}
/***************************************
* Adds a sub-fragment to be displayed as a modal dialog.
*
* @see #showDialog(String, InteractionFragment, boolean, String,
* DialogActionListener, Collection)
*/
protected DialogFragment showDialog(
String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions) throws Exception
{
return showDialog(sParamNameTemplate,
rContentFragment,
bModal,
null,
rDialogListener,
rDialogActions);
}
/***************************************
* Adds a sub-fragment to be displayed as a dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* <p>If the creating code needs to programmatically close the dialog view
* instead of by a button click of the user it can do so by invoking the
* {@link ViewFragment#hide()} method on the returned view fragment instance
* on a corresponding interaction.</p>
*
* @param sParamNameTemplate The name template to be used for generated
* dialog parameter names or NULL to derive it
* from the content fragment
* @param rContentFragment The fragment to be displayed as the dialog
* content
* @param bModal TRUE for a modal view
* @param sQuestion A string (typically a question) that will be
* displayed next to the dialog action buttons.
* @param rDialogListener The dialog action listener or NULL for none
* @param rDialogActions The actions to be displayed as the dialog
* buttons
*
* @return The new dialog fragment instance
*
* @throws Exception If displaying the dialog fails
*/
protected DialogFragment showDialog(
String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal,
String sQuestion,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions) throws Exception
{
DialogFragment aDialog =
new DialogFragment(sParamNameTemplate,
rContentFragment,
bModal,
sQuestion,
rDialogActions);
showDialogImpl(aDialog, rDialogListener);
return aDialog;
}
/***************************************
* Displays a message with an error icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showErrorMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_ERROR_ICON,
null,
DialogAction.OK);
}
/***************************************
* Displays a message with an info icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showInfoMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_INFO_ICON,
null,
DialogAction.OK);
}
/***************************************
* Displays a process message in a message box dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showMessageBox(
String sMessage,
String sIcon,
DialogActionListener rDialogListener,
DialogAction... rDialogActions)
{
return showMessageBox(sMessage,
sIcon,
rDialogListener,
Arrays.asList(rDialogActions));
}
/***************************************
* Displays a process message in a message box dialog. The parameter for the
* dialog fragment will be added automatically to the input parameters of
* this instance. Therefore the parameter lists of this instance MUST be
* mutable!
*
* <p>If one or more extras parameters are given they will be displayed
* between the message and the dialog buttons. Any necessary initialization
* of these parameters including UI properties must be done by the invoking
* code before invoking the message box.</p>
*
* @param sMessage The message to be displayed in the message box
* @param sIcon The resource name for an icon or NULL for the
* standard icon.
* @param rDialogListener The dialog action listener or NULL for none
* @param rDialogActions The actions to be displayed as the message box
* buttons
* @param rExtraParams Optional extra parameters to be displayed in the
* message box
*
* @return The view fragment that has been created for the message box
*/
protected MessageBoxFragment showMessageBox(
String sMessage,
String sIcon,
DialogActionListener rDialogListener,
Collection<DialogAction> rDialogActions,
RelationType<?>... rExtraParams)
{
MessageBoxFragment aMessageBox =
new MessageBoxFragment(sMessage,
sIcon,
rDialogActions,
rExtraParams);
try
{
showDialogImpl(aMessageBox, rDialogListener);
}
catch (Exception e)
{
// message boxes should not fail
throw new IllegalStateException(e);
}
return aMessageBox;
}
/***************************************
* Displays a modal dialog with a name prefix that is derived from the name
* of the content fragment.
*
* @see #showDialog(String, InteractionFragment, boolean,
* DialogActionListener, Collection)
*/
protected DialogFragment showModalDialog(
InteractionFragment rContentFragment,
Collection<DialogAction> rDialogActions) throws Exception
{
return showDialog(null, rContentFragment, true, null, rDialogActions);
}
/***************************************
* Adds a sub-fragment to be displayed as a view. The parameter for the view
* fragment will be added automatically to the input parameters of this
* instance. Therefore the parameter lists of this instance MUST be mutable!
*
* <p>Because a view has no explicit buttons like dialogs it must be closed
* by the creating code by invoking the {@link ViewFragment#hide()} method
* on the returned view fragment instance on a corresponding interaction.
* </p>
*
* @param sParamNameTemplate The name template to be used for generated
* view parameter names
* @param rContentFragment The fragment to be displayed as the view
* content
* @param bModal TRUE for a modal view
*
* @return The new view fragment to provide access to it's method {@link
* ViewFragment#hide()}
*
* @throws Exception If displaying the dialog fails
*/
protected ViewFragment showView(String sParamNameTemplate,
InteractionFragment rContentFragment,
boolean bModal) throws Exception
{
ViewFragment aViewFragment =
new ViewFragment(sParamNameTemplate,
rContentFragment,
bModal ? ViewDisplayType.MODAL_VIEW
: ViewDisplayType.VIEW);
aViewFragment.show(this);
return aViewFragment;
}
/***************************************
* Displays a message with an warning icon and a single OK button.
*
* @see #showMessageBox(String, String, DialogActionListener, Collection,
* RelationType...)
*/
protected MessageBoxFragment showWarningMessage(String sMessage)
{
return showMessageBox(sMessage,
MESSAGE_BOX_WARNING_ICON,
null,
DialogAction.OK);
}
/***************************************
* Updates the attributes of an entity from the process parameter values
* that are stored with the given entity attribute relation types. The
* attributes can either be direct or extra attributes.
*
* <p>To set the process parameters from entity attributes the reverse
* method {@link #setParameterValuesFromEntity(Entity, List)} can be used.
* </p>
*
* @param rEntity The entity to update
* @param rParams The process parameter and entity attribute relation types
*
* @throws StorageException if setting an extra attribute fails
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void updateEntityFromParameterValues(
Entity rEntity,
List<RelationType<?>> rParams) throws StorageException
{
for (RelationType<?> rParam : rParams)
{
if (hasParameter(rParam))
{
if (rEntity.getDefinition().getAttributes().contains(rParam))
{
rEntity.set((RelationType) rParam, getParameter(rParam));
}
else if (rParam.hasFlag(ExtraAttributes.EXTRA_ATTRIBUTE_FLAG))
{
rEntity.setExtraAttribute((RelationType) rParam,
getParameter(rParam));
}
}
}
}
/***************************************
* Internal method to abort the current execution of this fragment. It can
* be used to undo data and parameter initializations or interactive
* modifications that have been performed by this fragment. Subclasses must
* implement {@link #abort()} instead.
*
* @throws Exception On errors
*/
final void abortFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.abortFragment();
}
abort();
}
/***************************************
* Internal method that handles the invocation of {@link
* #afterInteraction(RelationType)} for this instance and all registered
* sub-fragments.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void afterFragmentInteraction(RelationType<?> rInteractionParam)
throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.afterFragmentInteraction(rInteractionParam);
}
afterInteraction(rInteractionParam);
}
/***************************************
* This method can be overridden by a subclass to indicate whether it
* supports a rollback of the modifications it has performed. The default
* implementation always returns TRUE.
*
* @return TRUE if the step implementation support a rollback
*
* @see #rollback()
* @see ProcessStep#canRollback()
*/
final boolean canFragmentRollback()
{
for (InteractionFragment rSubFragment : getSubFragments())
{
if (!rSubFragment.canFragmentRollback())
{
return false;
}
}
return canRollback();
}
/***************************************
* Internal method to finish the fragment execution. Subclasses must
* implement {@link #finish()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void finishFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.finishFragment();
}
finish();
}
/***************************************
* Checks whether this fragment contains a certain continuation parameter.
*
* @param rContinuationParam The continuation parameter to check
*
* @return TRUE if the given continuation parameter belongs to this fragment
*/
InteractionFragment getContinuationFragment(
RelationType<?> rContinuationParam)
{
InteractionFragment rContinuationFragment = null;
for (InteractionFragment rSubFragment : getSubFragments())
{
rContinuationFragment =
rSubFragment.getContinuationFragment(rContinuationParam);
if (rContinuationFragment != null)
{
break;
}
}
if (rContinuationFragment == null &&
aFragmentContinuationParams != null &&
aFragmentContinuationParams.contains(rContinuationParam))
{
rContinuationFragment = this;
}
return rContinuationFragment;
}
/***************************************
* Internal method that handles the invocation of {@link
* #handleInteraction(RelationType)} for this instance and all registered
* sub-fragments.
*
* @param rInteractionParam The interaction parameter
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void handleFragmentInteraction(RelationType<?> rInteractionParam)
throws Exception
{
boolean bRootFragmentInteraction = true;
for (InteractionFragment rSubFragment : getSubFragments())
{
if (rSubFragment.hasFragmentInteraction(rInteractionParam))
{
rSubFragment.handleFragmentInteraction(rInteractionParam);
bRootFragmentInteraction = false;
break;
}
}
if (bRootFragmentInteraction || hasInteraction(rInteractionParam))
{
handleInteraction(rInteractionParam);
}
}
/***************************************
* Internal method to check whether an interaction has been caused by an
* interaction parameter from this fragment or one of it's sub-fragments.
* Subclasses must implement {@link #hasInteraction(RelationType)} instead.
*
* @param rInteractionParam The interaction parameter to check
*
* @return TRUE if the interaction was caused by a parameter of this
* fragment
*/
final boolean hasFragmentInteraction(RelationType<?> rInteractionParam)
{
for (InteractionFragment rSubFragment : getSubFragments())
{
if (rSubFragment.hasFragmentInteraction(rInteractionParam))
{
return true;
}
}
return hasInteraction(rInteractionParam);
}
/***************************************
* Internal method to initialize this fragment. Subclasses must implement
* {@link #init()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void initFragment() throws Exception
{
getSubFragments().clear();
init();
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.initFragment();
}
markFragmentInputParams();
}
/***************************************
* Internal method to prepare each interaction of this fragment. Subclasses
* must implement {@link #prepareInteraction()} instead.
*
* @throws Exception Any kind of exception may be thrown in case of errors
*/
final void prepareFragmentInteraction() throws Exception
{
prepareInteraction();
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.prepareFragmentInteraction();
}
}
/***************************************
* Internal method to perform a rollback of data and parameter modifications
* that have been performed by this fragment. Subclasses must implement
* {@link #rollback()} instead.
*
* @throws Exception On errors
*/
final void rollbackFragment() throws Exception
{
for (InteractionFragment rSubFragment : getSubFragments())
{
rSubFragment.rollbackFragment();
}
rollback();
}
/***************************************
* Sets the parameter this fragment is displayed in.
*
* @param rFragmentParam The fragment parameter
*/
final void setFragmentParam(
RelationType<List<RelationType<?>>> rFragmentParam)
{
this.rFragmentParam = rFragmentParam;
}
/***************************************
* Package-internal method to associate this fragment with a particular
* interactive process step.
*
* @param rProcessStep The process step this fragment belongs to
*/
final void setProcessStep(Interaction rProcessStep)
{
this.rProcessStep = rProcessStep;
initProcessStep(rProcessStep);
}
/***************************************
* Internal method to validate the fragment's process parameters during
* state changes of the process. Subclasses must implement {@link
* #validateParameters(boolean)} instead.
*
* @see ProcessStep#validateParameters(boolean)
*/
Map<RelationType<?>, String> validateFragmentParameters(
boolean bOnInteraction)
{
HashMap<RelationType<?>, String> rErrorParams =
new HashMap<RelationType<?>, String>();
for (InteractionFragment rSubFragment : getSubFragments())
{
rErrorParams.putAll(rSubFragment.validateFragmentParameters(bOnInteraction));
}
rErrorParams.putAll(validateParameters(bOnInteraction));
return rErrorParams;
}
/***************************************
* Internal method that displays any kind of dialog fragment.
*
* @param rDialogFragment The dialog fragment
* @param rDialogListener An optional dialog listener or NULL for none
*
* @throws Exception If displaying the dialog fails
*/
private void showDialogImpl(
DialogFragment rDialogFragment,
DialogActionListener rDialogListener) throws Exception
{
if (rDialogListener != null)
{
rDialogFragment.addDialogActionListener(rDialogListener);
}
rDialogFragment.show(this);
}
//~ Inner Classes ----------------------------------------------------------
/********************************************************************
* An implementation of the {@link UploadHandler} interface that writes
* uploaded data into a process parameter. This class is used internally by
* {@link InteractionFragment#prepareUpload(RelationType, RelationType,
* int)}.
*
* @author eso
*/
class ProcessParamUploadHandler implements UploadHandler
{
//~ Instance fields ----------------------------------------------------
private RelationType<byte[]> rTargetParam;
private Pattern rContentTypePattern;
private int nMaxSize;
//~ Constructors -------------------------------------------------------
/***************************************
* Creates a new instance.
*
* @param rTargetParam The target parameter for the uploaded data
* @param rContentTypePattern A pattern that limits allowed content
* types or NULL for no restriction
* @param nMaxSize The maximum upload size
*/
public ProcessParamUploadHandler(
RelationType<byte[]> rTargetParam,
Pattern rContentTypePattern,
int nMaxSize)
{
this.rTargetParam = rTargetParam;
this.rContentTypePattern = rContentTypePattern;
this.nMaxSize = nMaxSize;
}
//~ Methods ------------------------------------------------------------
/***************************************
* {@inheritDoc}
*/
@Override
public void processUploadData(String sFilename,
String sContentType,
InputStream rDataStream) throws Exception
{
byte[] aBuf = new byte[1024 * 16];
int nRead;
if (rContentTypePattern != null &&
!rContentTypePattern.matcher(sContentType).matches())
{
error("InvalidUploadContentType");
}
ByteArrayOutputStream aOutStream = new ByteArrayOutputStream();
while ((nRead = rDataStream.read(aBuf, 0, aBuf.length)) != -1)
{
if (aOutStream.size() + nRead <= nMaxSize)
{
aOutStream.write(aBuf, 0, nRead);
}
else
{
error("UploadSizeLimitExceeded");
}
}
setParameter(rTargetParam, aOutStream.toByteArray());
removeParameterAnnotation(rTargetParam, ERROR_MESSAGE);
}
/***************************************
* Sets an error message on the target parameter and then throws an
* exception.
*
* @param sMessage The error message
*
* @throws ProcessException The error exception
*/
private void error(String sMessage) throws ProcessException
{
annotateParameter(rTargetParam, null, ERROR_MESSAGE, sMessage);
throw new ProcessException(getProcessStep(), sMessage);
}
}
}
| FRAMEWORK-167: Removal of sub-fragments is not working correctly
Changes reverted - not a bug but necessary for certain use cases | src/main/java/de/esoco/process/step/InteractionFragment.java | FRAMEWORK-167: Removal of sub-fragments is not working correctly | <ide><path>rc/main/java/de/esoco/process/step/InteractionFragment.java
<ide> String sName,
<ide> InteractionFragment rSubFragment)
<ide> {
<del> Parameter<List<RelationType<?>>> rSubFragmentParam =
<del> listParam(sName, RelationType.class);
<del>
<del> addSubFragment(rSubFragmentParam.type(), rSubFragment);
<del>
<ide> return addSubFragment(sName, rSubFragment, true);
<ide> }
<ide>
<ide> {
<ide> get(INPUT_PARAMS).removeAll(rSubFragment.getInputParameters());
<ide> get(INPUT_PARAMS).remove(rFragmentParam);
<del> getInteractionParameters().remove(rFragmentParam);
<ide> getSubFragments().remove(rSubFragment);
<ide> deleteParameters(rFragmentParam);
<ide> rSubFragment.setProcessStep(null); |
|
Java | apache-2.0 | 1c39037da2dbf759ef23ded1262b3d0066345ac7 | 0 | wschaeferB/autopsy,millmanorama/autopsy,mhmdfy/autopsy,karlmortensen/autopsy,dgrove727/autopsy,maxrp/autopsy,eXcomm/autopsy,karlmortensen/autopsy,millmanorama/autopsy,karlmortensen/autopsy,sidheshenator/autopsy,wschaeferB/autopsy,rcordovano/autopsy,wschaeferB/autopsy,raman-bt/autopsy,esaunders/autopsy,APriestman/autopsy,maxrp/autopsy,raman-bt/autopsy,esaunders/autopsy,mhmdfy/autopsy,APriestman/autopsy,maxrp/autopsy,esaunders/autopsy,karlmortensen/autopsy,eXcomm/autopsy,eXcomm/autopsy,eXcomm/autopsy,APriestman/autopsy,narfindustries/autopsy,narfindustries/autopsy,wschaeferB/autopsy,APriestman/autopsy,APriestman/autopsy,wschaeferB/autopsy,sidheshenator/autopsy,sidheshenator/autopsy,rcordovano/autopsy,raman-bt/autopsy,raman-bt/autopsy,raman-bt/autopsy,narfindustries/autopsy,APriestman/autopsy,dgrove727/autopsy,mhmdfy/autopsy,rcordovano/autopsy,APriestman/autopsy,rcordovano/autopsy,raman-bt/autopsy,dgrove727/autopsy,raman-bt/autopsy,sidheshenator/autopsy,mhmdfy/autopsy,millmanorama/autopsy,rcordovano/autopsy,esaunders/autopsy,esaunders/autopsy,millmanorama/autopsy,rcordovano/autopsy,maxrp/autopsy | /*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.recentactivity;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Make a system call through a system shell in a platform-independent manner in
* Java. <br /> This class only demonstrate a 'dir' or 'ls' within current
* (execution) path, if no parameters are used. If parameters are used, the
* first one is the system command to execute, the others are its system command
* parameters. <br /> To be system independent, an <b><a
* href="http://www.allapplabs.com/java_design_patterns/abstract_factory_pattern.htm">
* Abstract Factory Pattern</a></b> will be used to build the right underlying
* system shell in which the system command will be executed.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
* @see <a href="http://stackoverflow.com/questions/236737#236873"> How to make
* a system call that returns the stdout output as a string in various
* languages?</a>
*/
public final class JavaSystemCaller {
/**
* Execute a system command. <br /> Default is 'ls' in current directory if
* no parameters, or a system command (if Windows, it is automatically
* translated to 'dir')
*
* @param args first element is the system command, the others are its
* parameters (NOT NULL)
* @throws IllegalArgumentException if one parameters is null or empty.
* 'args' can be empty (default 'ls' performed then)
*/
// public static void main(final String[] args) {
// String anOutput = "";
// if (args.length == 0) {
// anOutput = JavaSystemCaller.Exec.execute("ls");
// } else {
// String[] someParameters = null;
// anOutput = JavaSystemCaller.Exec.execute(args[0], someParameters);
// }
// logger.log(Level.INFO, "Final output: " + anOutput);
// }
/**
* Asynchronously read the output of a given input stream. <br /> Any
* exception during execution of the command in managed in this thread.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class StreamGobbler extends Thread {
private static final Logger logger = Logger.getLogger(StreamGobbler.class.getName());
private InputStream is;
private String type;
private StringBuffer output = new StringBuffer();
private boolean doRun = false;
StreamGobbler(final InputStream anIs, final String aType) {
this.is = anIs;
this.type = aType;
this.doRun = true;
}
/**
* Asynchronous read of the input stream. <br /> Will report output as
* its its displayed.
*
* @see java.lang.Thread#run()
*/
@Override
public final void run() {
final String SEP = System.getProperty("line.separator");
try {
final InputStreamReader isr = new InputStreamReader(this.is);
final BufferedReader br = new BufferedReader(isr);
String line = null;
while ( doRun && (line = br.readLine()) != null) {
logger.log(Level.INFO, this.type + ">" + line);
this.output.append(line + SEP);
}
} catch (final IOException ioe) {
logger.log(Level.WARNING, ioe.getMessage());
}
}
/**
* Stop running the stream gobbler
* The thread will exit out gracefully after the current readLine() on stream unblocks
*/
public void stopRun() {
doRun = false;
}
/**
* Get output filled asynchronously. <br /> Should be called after
* execution
*
* @return final output
*/
public final String getOutput() {
return this.output.toString();
}
}
/**
* Execute a system command in the appropriate shell. <br /> Read
* asynchronously stdout and stderr to report any result.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class Exec {
private static final Logger logger = Logger.getLogger(Exec.class.getName());
private static Process proc = null;
private static String command = null;
private static JavaSystemCaller.IShell aShell = null;
/**
* Execute a system command. <br /> Listen asynchronously to stdout and
* stderr
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
* @return final output (stdout only)
*/
public static String execute(final String aCommand, final String... someParameters) throws IOException, InterruptedException {
String output = "";
JavaSystemCaller.ExecEnvironmentFactory anExecEnvFactory = getExecEnvironmentFactory(aCommand, someParameters);
aShell = anExecEnvFactory.createShell();
command = anExecEnvFactory.createCommandLine();
final Runtime rt = Runtime.getRuntime();
logger.log(Level.INFO, "Executing " + aShell.getShellCommand() + " " + command);
proc = rt.exec(aShell.getShellCommand() + " " + command);
try {
//give time to fully start the process
Thread.sleep(2000);
} catch (InterruptedException ex) {
logger.log(Level.WARNING, "Pause interrupted");
}
// any error message?
final JavaSystemCaller.StreamGobbler errorGobbler = new JavaSystemCaller.StreamGobbler(proc.getErrorStream(), "ERROR");
// any output?
final JavaSystemCaller.StreamGobbler outputGobbler = new JavaSystemCaller.StreamGobbler(proc.getInputStream(), "OUTPUT");
// kick them off
errorGobbler.start();
outputGobbler.start();
// any error???
final int exitVal = proc.waitFor();
logger.log(Level.INFO, "ExitValue: " + exitVal);
errorGobbler.stopRun();
outputGobbler.stopRun();
output = outputGobbler.getOutput();
return output;
}
private static JavaSystemCaller.ExecEnvironmentFactory getExecEnvironmentFactory(final String aCommand, final String... someParameters) {
final String anOSName = System.getProperty("os.name");
if (anOSName.toLowerCase().startsWith("windows")) {
return new JavaSystemCaller.WindowsExecEnvFactory(aCommand, someParameters);
}
return new JavaSystemCaller.UnixExecEnvFactory(aCommand, someParameters);
// TODO be more specific for other OS.
}
private Exec() { /*
*
*/ }
public static synchronized void stop() {
logger.log(Level.INFO, "Stopping Execution of: " + command);
if (proc != null) {
proc.destroy();
proc = null;
}
}
public static Process getProcess() {
return proc;
}
}
private JavaSystemCaller() { /*
*
*/ }
/*
* ABSTRACT FACTORY PATTERN
*/
/**
* Environment needed to be build for the Exec class to be able to execute
* the system command. <br /> Must have the right shell and the right
* command line. <br />
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public abstract static class ExecEnvironmentFactory {
private String command = null;
private ArrayList<String> parameters = new ArrayList<String>();
final String getCommand() {
return this.command;
}
final ArrayList<String> getParameters() {
return this.parameters;
}
/**
* Builds an execution environment for a system command to be played.
* <br /> Independent from the OS.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public ExecEnvironmentFactory(final String aCommand, final String... someParameters) {
if (aCommand == null || aCommand.length() == 0) {
throw new IllegalArgumentException("Command must not be empty");
}
this.command = aCommand;
for (int i = 0; i < someParameters.length; i++) {
final String aParameter = someParameters[i];
if (aParameter == null || aParameter.length() == 0) {
throw new IllegalArgumentException("Parameter n° '" + i + "' must not be empty");
}
this.parameters.add(aParameter);
}
}
/**
* Builds the right Shell for the current OS. <br /> Allow for
* independent platform execution.
*
* @return right shell, NEVER NULL
*/
public abstract JavaSystemCaller.IShell createShell();
/**
* Builds the right command line for the current OS. <br /> Means that a
* command might be translated, if it does not fit the right OS ('dir'
* => 'ls' on unix)
*
* @return right complete command line, with parameters added (NEVER
* NULL)
*/
public abstract String createCommandLine();
protected final String buildCommandLine(final String aCommand, final ArrayList<String> someParameters) {
final StringBuilder aCommandLine = new StringBuilder();
aCommandLine.append(aCommand);
for (String aParameter : someParameters) {
aCommandLine.append(" ");
aCommandLine.append(aParameter);
}
return aCommandLine.toString();
}
}
/**
* Builds a Execution Environment for Windows. <br /> Cmd with windows
* commands
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class WindowsExecEnvFactory extends JavaSystemCaller.ExecEnvironmentFactory {
/**
* Builds an execution environment for a Windows system command to be
* played. <br /> Any command not from windows will be translated in its
* windows equivalent if possible.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public WindowsExecEnvFactory(final String aCommand, final String... someParameters) {
super(aCommand, someParameters);
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createShell()
*/
@Override
public JavaSystemCaller.IShell createShell() {
return new JavaSystemCaller.WindowsShell();
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createCommandLine()
*/
@Override
public String createCommandLine() {
String aCommand = getCommand();
if (aCommand.toLowerCase().trim().equals("ls")) {
aCommand = "dir";
}
// TODO translates other Unix commands
return buildCommandLine(aCommand, getParameters());
}
}
/**
* Builds a Execution Environment for Unix. <br /> Sh with Unix commands
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class UnixExecEnvFactory extends JavaSystemCaller.ExecEnvironmentFactory {
/**
* Builds an execution environment for a Unix system command to be
* played. <br /> Any command not from Unix will be translated in its
* Unix equivalent if possible.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public UnixExecEnvFactory(final String aCommand, final String... someParameters) {
super(aCommand, someParameters);
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createShell()
*/
@Override
public JavaSystemCaller.IShell createShell() {
return new JavaSystemCaller.UnixShell();
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createCommandLine()
*/
@Override
public String createCommandLine() {
String aCommand = getCommand();
if (aCommand.toLowerCase().trim().equals("dir")) {
aCommand = "ls";
}
// TODO translates other Windows commands
return buildCommandLine(aCommand, getParameters());
}
}
/**
* System Shell with its right OS command. <br /> 'cmd' for Windows or 'sh'
* for Unix, ...
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public interface IShell {
/**
* Get the right shell command. <br /> Used to launch a new shell
*
* @return command used to launch a Shell (NEVEL NULL)
*/
String getShellCommand();
}
/**
* Windows shell (cmd). <br /> More accurately 'cmd /C'
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class WindowsShell implements JavaSystemCaller.IShell {
/**
* @see test.JavaSystemCaller.IShell#getShellCommand()
*/
@Override
public final String getShellCommand() {
final String osName = System.getProperty("os.name");
if (osName.equals("Windows 95")) {
return "command.com /C";
}
return "cmd.exe /C";
}
}
/**
* Unix shell (sh). <br /> More accurately 'sh -C'
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class UnixShell implements JavaSystemCaller.IShell {
/**
* @see test.JavaSystemCaller.IShell#getShellCommand()
*/
@Override
public final String getShellCommand() {
return "/bin/sh -c";
}
}
} | RecentActivity/src/org/sleuthkit/autopsy/recentactivity/JavaSystemCaller.java | /*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.recentactivity;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Make a system call through a system shell in a platform-independent manner in
* Java. <br /> This class only demonstrate a 'dir' or 'ls' within current
* (execution) path, if no parameters are used. If parameters are used, the
* first one is the system command to execute, the others are its system command
* parameters. <br /> To be system independent, an <b><a
* href="http://www.allapplabs.com/java_design_patterns/abstract_factory_pattern.htm">
* Abstract Factory Pattern</a></b> will be used to build the right underlying
* system shell in which the system command will be executed.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
* @see <a href="http://stackoverflow.com/questions/236737#236873"> How to make
* a system call that returns the stdout output as a string in various
* languages?</a>
*/
public final class JavaSystemCaller {
/**
* Execute a system command. <br /> Default is 'ls' in current directory if
* no parameters, or a system command (if Windows, it is automatically
* translated to 'dir')
*
* @param args first element is the system command, the others are its
* parameters (NOT NULL)
* @throws IllegalArgumentException if one parameters is null or empty.
* 'args' can be empty (default 'ls' performed then)
*/
// public static void main(final String[] args) {
// String anOutput = "";
// if (args.length == 0) {
// anOutput = JavaSystemCaller.Exec.execute("ls");
// } else {
// String[] someParameters = null;
// anOutput = JavaSystemCaller.Exec.execute(args[0], someParameters);
// }
// logger.log(Level.INFO, "Final output: " + anOutput);
// }
/**
* Asynchronously read the output of a given input stream. <br /> Any
* exception during execution of the command in managed in this thread.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class StreamGobbler extends Thread {
private static final Logger logger = Logger.getLogger(StreamGobbler.class.getName());
private InputStream is;
private String type;
private StringBuffer output = new StringBuffer();
StreamGobbler(final InputStream anIs, final String aType) {
this.is = anIs;
this.type = aType;
}
/**
* Asynchronous read of the input stream. <br /> Will report output as
* its its displayed.
*
* @see java.lang.Thread#run()
*/
@Override
public final void run() {
try {
final InputStreamReader isr = new InputStreamReader(this.is);
final BufferedReader br = new BufferedReader(isr);
String line = null;
while ((line = br.readLine()) != null) {
logger.log(Level.INFO, this.type + ">" + line);
this.output.append(line + System.getProperty("line.separator"));
}
} catch (final IOException ioe) {
logger.log(Level.WARNING, ioe.getMessage());
}
}
/**
* Get output filled asynchronously. <br /> Should be called after
* execution
*
* @return final output
*/
public final String getOutput() {
return this.output.toString();
}
}
/**
* Execute a system command in the appropriate shell. <br /> Read
* asynchronously stdout and stderr to report any result.
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class Exec {
private static final Logger logger = Logger.getLogger(Exec.class.getName());
private static Process proc = null;
private static String command = null;
private static JavaSystemCaller.IShell aShell = null;
/**
* Execute a system command. <br /> Listen asynchronously to stdout and
* stderr
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
* @return final output (stdout only)
*/
public static String execute(final String aCommand, final String... someParameters) throws IOException, InterruptedException {
String output = "";
JavaSystemCaller.ExecEnvironmentFactory anExecEnvFactory = getExecEnvironmentFactory(aCommand, someParameters);
aShell = anExecEnvFactory.createShell();
command = anExecEnvFactory.createCommandLine();
final Runtime rt = Runtime.getRuntime();
logger.log(Level.INFO, "Executing " + aShell.getShellCommand() + " " + command);
proc = rt.exec(aShell.getShellCommand() + " " + command);
try {
//give time to fully start the process
Thread.sleep(2000);
} catch (InterruptedException ex) {
logger.log(Level.WARNING, "Pause interrupted");
}
// any error message?
final JavaSystemCaller.StreamGobbler errorGobbler = new JavaSystemCaller.StreamGobbler(proc.getErrorStream(), "ERROR");
// any output?
final JavaSystemCaller.StreamGobbler outputGobbler = new JavaSystemCaller.StreamGobbler(proc.getInputStream(), "OUTPUT");
// kick them off
errorGobbler.start();
outputGobbler.start();
// any error???
final int exitVal = proc.waitFor();
logger.log(Level.INFO, "ExitValue: " + exitVal);
output = outputGobbler.getOutput();
return output;
}
private static JavaSystemCaller.ExecEnvironmentFactory getExecEnvironmentFactory(final String aCommand, final String... someParameters) {
final String anOSName = System.getProperty("os.name");
if (anOSName.toLowerCase().startsWith("windows")) {
return new JavaSystemCaller.WindowsExecEnvFactory(aCommand, someParameters);
}
return new JavaSystemCaller.UnixExecEnvFactory(aCommand, someParameters);
// TODO be more specific for other OS.
}
private Exec() { /*
*
*/ }
public static synchronized void stop() {
logger.log(Level.INFO, "Stopping Execution of: " + command);
if (proc != null) {
proc.destroy();
proc = null;
}
}
public static Process getProcess() {
return proc;
}
}
private JavaSystemCaller() { /*
*
*/ }
/*
* ABSTRACT FACTORY PATTERN
*/
/**
* Environment needed to be build for the Exec class to be able to execute
* the system command. <br /> Must have the right shell and the right
* command line. <br />
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public abstract static class ExecEnvironmentFactory {
private String command = null;
private ArrayList<String> parameters = new ArrayList<String>();
final String getCommand() {
return this.command;
}
final ArrayList<String> getParameters() {
return this.parameters;
}
/**
* Builds an execution environment for a system command to be played.
* <br /> Independent from the OS.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public ExecEnvironmentFactory(final String aCommand, final String... someParameters) {
if (aCommand == null || aCommand.length() == 0) {
throw new IllegalArgumentException("Command must not be empty");
}
this.command = aCommand;
for (int i = 0; i < someParameters.length; i++) {
final String aParameter = someParameters[i];
if (aParameter == null || aParameter.length() == 0) {
throw new IllegalArgumentException("Parameter n° '" + i + "' must not be empty");
}
this.parameters.add(aParameter);
}
}
/**
* Builds the right Shell for the current OS. <br /> Allow for
* independent platform execution.
*
* @return right shell, NEVER NULL
*/
public abstract JavaSystemCaller.IShell createShell();
/**
* Builds the right command line for the current OS. <br /> Means that a
* command might be translated, if it does not fit the right OS ('dir'
* => 'ls' on unix)
*
* @return right complete command line, with parameters added (NEVER
* NULL)
*/
public abstract String createCommandLine();
protected final String buildCommandLine(final String aCommand, final ArrayList<String> someParameters) {
final StringBuilder aCommandLine = new StringBuilder();
aCommandLine.append(aCommand);
for (String aParameter : someParameters) {
aCommandLine.append(" ");
aCommandLine.append(aParameter);
}
return aCommandLine.toString();
}
}
/**
* Builds a Execution Environment for Windows. <br /> Cmd with windows
* commands
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class WindowsExecEnvFactory extends JavaSystemCaller.ExecEnvironmentFactory {
/**
* Builds an execution environment for a Windows system command to be
* played. <br /> Any command not from windows will be translated in its
* windows equivalent if possible.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public WindowsExecEnvFactory(final String aCommand, final String... someParameters) {
super(aCommand, someParameters);
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createShell()
*/
@Override
public JavaSystemCaller.IShell createShell() {
return new JavaSystemCaller.WindowsShell();
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createCommandLine()
*/
@Override
public String createCommandLine() {
String aCommand = getCommand();
if (aCommand.toLowerCase().trim().equals("ls")) {
aCommand = "dir";
}
// TODO translates other Unix commands
return buildCommandLine(aCommand, getParameters());
}
}
/**
* Builds a Execution Environment for Unix. <br /> Sh with Unix commands
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static final class UnixExecEnvFactory extends JavaSystemCaller.ExecEnvironmentFactory {
/**
* Builds an execution environment for a Unix system command to be
* played. <br /> Any command not from Unix will be translated in its
* Unix equivalent if possible.
*
* @param aCommand system command to be executed (must not be null or
* empty)
* @param someParameters parameters of the command (must not be null or
* empty)
*/
public UnixExecEnvFactory(final String aCommand, final String... someParameters) {
super(aCommand, someParameters);
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createShell()
*/
@Override
public JavaSystemCaller.IShell createShell() {
return new JavaSystemCaller.UnixShell();
}
/**
* @see test.JavaSystemCaller.ExecEnvironmentFactory#createCommandLine()
*/
@Override
public String createCommandLine() {
String aCommand = getCommand();
if (aCommand.toLowerCase().trim().equals("dir")) {
aCommand = "ls";
}
            // TODO translate other Windows commands
return buildCommandLine(aCommand, getParameters());
}
}
/**
* System Shell with its right OS command. <br /> 'cmd' for Windows or 'sh'
* for Unix, ...
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public interface IShell {
/**
* Get the right shell command. <br /> Used to launch a new shell
*
         * @return command used to launch a Shell (NEVER NULL)
*/
String getShellCommand();
}
/**
* Windows shell (cmd). <br /> More accurately 'cmd /C'
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class WindowsShell implements JavaSystemCaller.IShell {
/**
* @see test.JavaSystemCaller.IShell#getShellCommand()
*/
@Override
public final String getShellCommand() {
final String osName = System.getProperty("os.name");
if (osName.equals("Windows 95")) {
return "command.com /C";
}
return "cmd.exe /C";
}
}
/**
     * Unix shell (sh). <br /> More accurately 'sh -c'
*
* @author <a href="http://stackoverflow.com/users/6309/vonc">VonC</a>
*/
public static class UnixShell implements JavaSystemCaller.IShell {
/**
* @see test.JavaSystemCaller.IShell#getShellCommand()
*/
@Override
public final String getShellCommand() {
return "/bin/sh -c";
}
}
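    /**
     * Editor's sketch (hypothetical helper, not part of the original file): picks the factory for
     * the current OS and assembles the full command line from the pieces defined above.
     */
    public static String buildPlatformCommandLine(final String aCommand, final String... someParameters) {
        final String osName = System.getProperty("os.name").toLowerCase();
        final JavaSystemCaller.ExecEnvironmentFactory factory = osName.contains("windows")
                ? new JavaSystemCaller.WindowsExecEnvFactory(aCommand, someParameters)
                : new JavaSystemCaller.UnixExecEnvFactory(aCommand, someParameters);
        // e.g. "cmd.exe /C dir" on Windows and "/bin/sh -c ls" on Unix for aCommand = "dir"
        return factory.createShell().getShellCommand() + " " + factory.createCommandLine();
    }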
} | cleanup stdout/stderr collection threads after forked process returns
| RecentActivity/src/org/sleuthkit/autopsy/recentactivity/JavaSystemCaller.java | cleanup stdout/stderr collection threads after forked process returns | <ide><path>ecentActivity/src/org/sleuthkit/autopsy/recentactivity/JavaSystemCaller.java
<ide> private InputStream is;
<ide> private String type;
<ide> private StringBuffer output = new StringBuffer();
<add> private boolean doRun = false;
<ide>
<ide> StreamGobbler(final InputStream anIs, final String aType) {
<ide> this.is = anIs;
<ide> this.type = aType;
<add> this.doRun = true;
<ide> }
<ide>
<ide> /**
<ide> */
<ide> @Override
<ide> public final void run() {
<add> final String SEP = System.getProperty("line.separator");
<ide> try {
<ide> final InputStreamReader isr = new InputStreamReader(this.is);
<ide> final BufferedReader br = new BufferedReader(isr);
<ide> String line = null;
<del> while ((line = br.readLine()) != null) {
<add> while ( doRun && (line = br.readLine()) != null) {
<ide> logger.log(Level.INFO, this.type + ">" + line);
<del> this.output.append(line + System.getProperty("line.separator"));
<add> this.output.append(line + SEP);
<ide> }
<ide> } catch (final IOException ioe) {
<ide> logger.log(Level.WARNING, ioe.getMessage());
<ide> }
<add> }
<add>
<add> /**
<add> * Stop running the stream gobbler
<add> * The thread will exit out gracefully after the current readLine() on stream unblocks
<add> */
<add> public void stopRun() {
<add> doRun = false;
<ide> }
<ide>
<ide> /**
<ide> final int exitVal = proc.waitFor();
<ide> logger.log(Level.INFO, "ExitValue: " + exitVal);
<ide>
<add> errorGobbler.stopRun();
<add> outputGobbler.stopRun();
<add>
<ide> output = outputGobbler.getOutput();
<ide>
<ide> return output; |
|
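The diff above adds a doRun flag and a stopRun() method so the stdout/stderr gobbler threads wind down once the forked process has returned. A minimal, self-contained sketch of the same pattern follows; it assumes nothing about the rest of the Autopsy class and every name in it is illustrative rather than taken from the project.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

class StoppableGobbler extends Thread {

    private final InputStream is;
    private final StringBuilder output = new StringBuilder();
    private volatile boolean doRun = true; // assumption: volatile here, unlike the plain boolean in the diff

    StoppableGobbler(final InputStream anIs) {
        this.is = anIs;
    }

    @Override
    public void run() {
        final String sep = System.getProperty("line.separator");
        try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
            String line;
            while (doRun && (line = br.readLine()) != null) {
                output.append(line).append(sep);
            }
        } catch (final IOException ignored) {
            // the stream is closed when the child process terminates
        }
    }

    void stopRun() {
        // the flag is checked before the next readLine(); the loop also ends naturally at EOF
        doRun = false;
    }

    String getOutput() {
        return output.toString();
    }

    public static void main(final String[] args) throws Exception {
        final Process proc = Runtime.getRuntime().exec("ls"); // illustrative command, Unix-only
        final StoppableGobbler out = new StoppableGobbler(proc.getInputStream());
        final StoppableGobbler err = new StoppableGobbler(proc.getErrorStream());
        out.start();
        err.start();
        final int exitVal = proc.waitFor();
        out.stopRun();          // mirrors the diff: stop the gobblers after the process returns
        err.stopRun();
        out.join();
        err.join();
        System.out.println("ExitValue: " + exitVal);
        System.out.print(out.getOutput());
    }
}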
Java | apache-2.0 | 8b7a7a40140d13cdeafefb90b915f7d769b1b5eb | 0 | pplatek/camel,onders86/camel,mnki/camel,Fabryprog/camel,davidwilliams1978/camel,tadayosi/camel,mzapletal/camel,pkletsko/camel,haku/camel,driseley/camel,pkletsko/camel,nikvaessen/camel,askannon/camel,arnaud-deprez/camel,chanakaudaya/camel,gyc567/camel,chirino/camel,allancth/camel,tadayosi/camel,bgaudaen/camel,jlpedrosa/camel,pax95/camel,allancth/camel,isavin/camel,bgaudaen/camel,lburgazzoli/camel,brreitme/camel,yuruki/camel,dkhanolkar/camel,stalet/camel,jpav/camel,pplatek/camel,mnki/camel,CodeSmell/camel,allancth/camel,grgrzybek/camel,NetNow/camel,dsimansk/camel,johnpoth/camel,dsimansk/camel,davidwilliams1978/camel,nboukhed/camel,anoordover/camel,jamesnetherton/camel,pplatek/camel,sverkera/camel,edigrid/camel,mnki/camel,mzapletal/camel,mgyongyosi/camel,FingolfinTEK/camel,NickCis/camel,satishgummadelli/camel,davidwilliams1978/camel,jpav/camel,mcollovati/camel,qst-jdc-labs/camel,scranton/camel,erwelch/camel,nikvaessen/camel,mnki/camel,askannon/camel,neoramon/camel,YoshikiHigo/camel,jarst/camel,iweiss/camel,bhaveshdt/camel,ekprayas/camel,nikhilvibhav/camel,koscejev/camel,johnpoth/camel,gyc567/camel,nicolaferraro/camel,yury-vashchyla/camel,YoshikiHigo/camel,arnaud-deprez/camel,RohanHart/camel,dkhanolkar/camel,sirlatrom/camel,gyc567/camel,gnodet/camel,coderczp/camel,adessaigne/camel,akhettar/camel,rparree/camel,davidkarlsen/camel,Fabryprog/camel,rmarting/camel,christophd/camel,veithen/camel,pmoerenhout/camel,alvinkwekel/camel,lasombra/camel,lburgazzoli/apache-camel,oalles/camel,pkletsko/camel,oscerd/camel,manuelh9r/camel,tkopczynski/camel,mohanaraosv/camel,JYBESSON/camel,atoulme/camel,dmvolod/camel,cunningt/camel,logzio/camel,lburgazzoli/camel,partis/camel,eformat/camel,davidwilliams1978/camel,mzapletal/camel,nikvaessen/camel,drsquidop/camel,grange74/camel,dvankleef/camel,jlpedrosa/camel,drsquidop/camel,onders86/camel,JYBESSON/camel,gautric/camel,bfitzpat/camel,josefkarasek/camel,sirlatrom/camel,josefkarasek/camel,pmoerenhout/camel,jlpedrosa/camel,objectiser/camel,borcsokj/camel,igarashitm/camel,haku/camel,grgrzybek/camel,Thopap/camel,dvankleef/camel,MohammedHammam/camel,drsquidop/camel,curso007/camel,DariusX/camel,trohovsky/camel,partis/camel,brreitme/camel,oalles/camel,christophd/camel,MrCoder/camel,dpocock/camel,borcsokj/camel,ramonmaruko/camel,rmarting/camel,pplatek/camel,adessaigne/camel,maschmid/camel,driseley/camel,FingolfinTEK/camel,w4tson/camel,DariusX/camel,coderczp/camel,adessaigne/camel,grange74/camel,prashant2402/camel,ekprayas/camel,prashant2402/camel,sabre1041/camel,haku/camel,woj-i/camel,satishgummadelli/camel,punkhorn/camel-upstream,adessaigne/camel,ramonmaruko/camel,dpocock/camel,skinzer/camel,engagepoint/camel,sabre1041/camel,erwelch/camel,onders86/camel,joakibj/camel,anoordover/camel,bdecoste/camel,lowwool/camel,MrCoder/camel,onders86/camel,oalles/camel,skinzer/camel,ullgren/camel,anoordover/camel,igarashitm/camel,dvankleef/camel,stravag/camel,manuelh9r/camel,onders86/camel,maschmid/camel,snadakuduru/camel,ge0ffrey/camel,iweiss/camel,w4tson/camel,acartapanis/camel,zregvart/camel,neoramon/camel,gilfernandes/camel,ramonmaruko/camel,jollygeorge/camel,JYBESSON/camel,lburgazzoli/apache-camel,salikjan/camel,joakibj/camel,eformat/camel,kevinearls/camel,bdecoste/camel,drsquidop/camel,hqstevenson/camel,MrCoder/camel,sebi-hgdata/camel,prashant2402/camel,tlehoux/camel,yuruki/camel,logzio/camel,cunningt/camel,yogamaha/camel,stravag/camel,ekprayas/camel,pmoerenhout/camel,sebi-hgdata/camel,jameszkw/camel,snadakuduru/camel,ef
ormat/camel,CandleCandle/camel,gautric/camel,stalet/camel,anton-k11/camel,jkorab/camel,pax95/camel,curso007/camel,kevinearls/camel,drsquidop/camel,josefkarasek/camel,ekprayas/camel,stalet/camel,bdecoste/camel,dkhanolkar/camel,jollygeorge/camel,borcsokj/camel,tkopczynski/camel,objectiser/camel,FingolfinTEK/camel,rmarting/camel,driseley/camel,gyc567/camel,bgaudaen/camel,curso007/camel,duro1/camel,igarashitm/camel,edigrid/camel,snurmine/camel,anton-k11/camel,sebi-hgdata/camel,apache/camel,jameszkw/camel,mike-kukla/camel,Thopap/camel,arnaud-deprez/camel,akhettar/camel,yury-vashchyla/camel,chirino/camel,kevinearls/camel,satishgummadelli/camel,duro1/camel,satishgummadelli/camel,isururanawaka/camel,trohovsky/camel,JYBESSON/camel,jpav/camel,noelo/camel,gilfernandes/camel,chirino/camel,tarilabs/camel,sverkera/camel,dsimansk/camel,sirlatrom/camel,jmandawg/camel,pmoerenhout/camel,YMartsynkevych/camel,igarashitm/camel,jonmcewen/camel,FingolfinTEK/camel,scranton/camel,mnki/camel,aaronwalker/camel,hqstevenson/camel,partis/camel,jarst/camel,DariusX/camel,stalet/camel,joakibj/camel,maschmid/camel,sverkera/camel,askannon/camel,hqstevenson/camel,w4tson/camel,woj-i/camel,satishgummadelli/camel,tarilabs/camel,adessaigne/camel,nikhilvibhav/camel,tkopczynski/camel,grgrzybek/camel,MohammedHammam/camel,bhaveshdt/camel,joakibj/camel,jollygeorge/camel,lowwool/camel,engagepoint/camel,johnpoth/camel,oalles/camel,edigrid/camel,dmvolod/camel,allancth/camel,NickCis/camel,sabre1041/camel,NetNow/camel,eformat/camel,hqstevenson/camel,apache/camel,JYBESSON/camel,dmvolod/camel,nboukhed/camel,ssharma/camel,engagepoint/camel,anton-k11/camel,dpocock/camel,Fabryprog/camel,davidkarlsen/camel,sebi-hgdata/camel,davidkarlsen/camel,driseley/camel,pkletsko/camel,coderczp/camel,curso007/camel,anton-k11/camel,NetNow/camel,partis/camel,grange74/camel,royopa/camel,dkhanolkar/camel,bhaveshdt/camel,josefkarasek/camel,isururanawaka/camel,yuruki/camel,akhettar/camel,jarst/camel,johnpoth/camel,jamesnetherton/camel,rparree/camel,edigrid/camel,MohammedHammam/camel,akhettar/camel,jmandawg/camel,scranton/camel,rparree/camel,jonmcewen/camel,erwelch/camel,CandleCandle/camel,yogamaha/camel,veithen/camel,veithen/camel,isavin/camel,CandleCandle/camel,manuelh9r/camel,sirlatrom/camel,oscerd/camel,snurmine/camel,jamesnetherton/camel,atoulme/camel,ssharma/camel,kevinearls/camel,jpav/camel,onders86/camel,dsimansk/camel,lburgazzoli/apache-camel,jmandawg/camel,eformat/camel,woj-i/camel,erwelch/camel,bfitzpat/camel,dpocock/camel,lburgazzoli/apache-camel,koscejev/camel,pax95/camel,tadayosi/camel,lasombra/camel,bdecoste/camel,gautric/camel,gnodet/camel,yury-vashchyla/camel,MohammedHammam/camel,veithen/camel,gyc567/camel,christophd/camel,lburgazzoli/camel,aaronwalker/camel,nikhilvibhav/camel,sverkera/camel,manuelh9r/camel,ssharma/camel,royopa/camel,gilfernandes/camel,jkorab/camel,edigrid/camel,snurmine/camel,bfitzpat/camel,partis/camel,dvankleef/camel,MrCoder/camel,CodeSmell/camel,tarilabs/camel,cunningt/camel,bgaudaen/camel,isururanawaka/camel,mike-kukla/camel,gilfernandes/camel,NetNow/camel,haku/camel,dsimansk/camel,erwelch/camel,koscejev/camel,jonmcewen/camel,mohanaraosv/camel,royopa/camel,tarilabs/camel,bgaudaen/camel,maschmid/camel,snadakuduru/camel,jameszkw/camel,trohovsky/camel,tlehoux/camel,mcollovati/camel,jlpedrosa/camel,chanakaudaya/camel,NetNow/camel,tadayosi/camel,tdiesler/camel,haku/camel,haku/camel,jollygeorge/camel,pax95/camel,gautric/camel,prashant2402/camel,logzio/camel,rmarting/camel,stravag/camel,jamesnetherton/camel,ekprayas/camel,dsimansk/cam
el,sirlatrom/camel,logzio/camel,oalles/camel,anoordover/camel,pmoerenhout/camel,tkopczynski/camel,nikvaessen/camel,aaronwalker/camel,anoordover/camel,maschmid/camel,manuelh9r/camel,yogamaha/camel,yuruki/camel,YoshikiHigo/camel,jonmcewen/camel,sebi-hgdata/camel,trohovsky/camel,brreitme/camel,Thopap/camel,RohanHart/camel,w4tson/camel,atoulme/camel,rparree/camel,apache/camel,tkopczynski/camel,lasombra/camel,MrCoder/camel,atoulme/camel,engagepoint/camel,allancth/camel,w4tson/camel,apache/camel,davidwilliams1978/camel,qst-jdc-labs/camel,chirino/camel,Fabryprog/camel,bhaveshdt/camel,apache/camel,gnodet/camel,pplatek/camel,edigrid/camel,MohammedHammam/camel,sabre1041/camel,grange74/camel,acartapanis/camel,CandleCandle/camel,sebi-hgdata/camel,jlpedrosa/camel,lasombra/camel,objectiser/camel,tkopczynski/camel,jkorab/camel,acartapanis/camel,mohanaraosv/camel,mcollovati/camel,bhaveshdt/camel,ssharma/camel,jamesnetherton/camel,iweiss/camel,yuruki/camel,neoramon/camel,ge0ffrey/camel,davidwilliams1978/camel,YoshikiHigo/camel,jpav/camel,Thopap/camel,zregvart/camel,yogamaha/camel,isururanawaka/camel,grgrzybek/camel,CodeSmell/camel,bdecoste/camel,askannon/camel,JYBESSON/camel,grgrzybek/camel,neoramon/camel,jameszkw/camel,anton-k11/camel,grange74/camel,curso007/camel,apache/camel,YMartsynkevych/camel,oscerd/camel,lasombra/camel,qst-jdc-labs/camel,atoulme/camel,tadayosi/camel,ssharma/camel,ekprayas/camel,ge0ffrey/camel,isavin/camel,jmandawg/camel,brreitme/camel,pplatek/camel,salikjan/camel,noelo/camel,jameszkw/camel,christophd/camel,driseley/camel,isavin/camel,tarilabs/camel,royopa/camel,adessaigne/camel,mzapletal/camel,CandleCandle/camel,skinzer/camel,stravag/camel,pkletsko/camel,brreitme/camel,lowwool/camel,alvinkwekel/camel,isavin/camel,noelo/camel,logzio/camel,pplatek/camel,akhettar/camel,mohanaraosv/camel,pax95/camel,jollygeorge/camel,cunningt/camel,oscerd/camel,tdiesler/camel,qst-jdc-labs/camel,jkorab/camel,jarst/camel,noelo/camel,bfitzpat/camel,hqstevenson/camel,oalles/camel,arnaud-deprez/camel,bfitzpat/camel,nicolaferraro/camel,yury-vashchyla/camel,dkhanolkar/camel,rmarting/camel,dkhanolkar/camel,yury-vashchyla/camel,chirino/camel,partis/camel,gnodet/camel,nboukhed/camel,neoramon/camel,johnpoth/camel,mzapletal/camel,borcsokj/camel,jameszkw/camel,snadakuduru/camel,MohammedHammam/camel,mike-kukla/camel,lowwool/camel,tdiesler/camel,drsquidop/camel,mnki/camel,mcollovati/camel,mike-kukla/camel,zregvart/camel,MrCoder/camel,woj-i/camel,allancth/camel,borcsokj/camel,skinzer/camel,yogamaha/camel,mike-kukla/camel,veithen/camel,ramonmaruko/camel,lburgazzoli/apache-camel,mgyongyosi/camel,snadakuduru/camel,neoramon/camel,iweiss/camel,lburgazzoli/camel,aaronwalker/camel,jlpedrosa/camel,koscejev/camel,jonmcewen/camel,YMartsynkevych/camel,tarilabs/camel,mike-kukla/camel,logzio/camel,trohovsky/camel,chanakaudaya/camel,acartapanis/camel,rparree/camel,gnodet/camel,jonmcewen/camel,bgaudaen/camel,nboukhed/camel,cunningt/camel,yuruki/camel,YMartsynkevych/camel,RohanHart/camel,jarst/camel,nikvaessen/camel,ullgren/camel,punkhorn/camel-upstream,noelo/camel,qst-jdc-labs/camel,Thopap/camel,gautric/camel,gilfernandes/camel,RohanHart/camel,akhettar/camel,ullgren/camel,NickCis/camel,driseley/camel,punkhorn/camel-upstream,nikhilvibhav/camel,aaronwalker/camel,yogamaha/camel,skinzer/camel,lburgazzoli/apache-camel,coderczp/camel,veithen/camel,coderczp/camel,nikvaessen/camel,kevinearls/camel,dvankleef/camel,prashant2402/camel,bdecoste/camel,curso007/camel,josefkarasek/camel,ullgren/camel,alvinkwekel/camel,snurmine/camel,tlehoux/camel,sn
adakuduru/camel,christophd/camel,mgyongyosi/camel,isururanawaka/camel,anoordover/camel,jkorab/camel,duro1/camel,sabre1041/camel,sverkera/camel,jamesnetherton/camel,arnaud-deprez/camel,NickCis/camel,oscerd/camel,arnaud-deprez/camel,lasombra/camel,gyc567/camel,stalet/camel,trohovsky/camel,mgyongyosi/camel,duro1/camel,pmoerenhout/camel,qst-jdc-labs/camel,pkletsko/camel,dpocock/camel,duro1/camel,cunningt/camel,koscejev/camel,johnpoth/camel,YoshikiHigo/camel,woj-i/camel,grgrzybek/camel,CodeSmell/camel,noelo/camel,tadayosi/camel,zregvart/camel,rparree/camel,yury-vashchyla/camel,chanakaudaya/camel,tdiesler/camel,stalet/camel,coderczp/camel,alvinkwekel/camel,gautric/camel,engagepoint/camel,davidkarlsen/camel,isururanawaka/camel,jkorab/camel,royopa/camel,koscejev/camel,iweiss/camel,snurmine/camel,scranton/camel,atoulme/camel,YMartsynkevych/camel,lburgazzoli/camel,christophd/camel,prashant2402/camel,chirino/camel,chanakaudaya/camel,objectiser/camel,rmarting/camel,dmvolod/camel,tdiesler/camel,aaronwalker/camel,acartapanis/camel,pax95/camel,lowwool/camel,anton-k11/camel,stravag/camel,igarashitm/camel,mgyongyosi/camel,nicolaferraro/camel,nboukhed/camel,RohanHart/camel,punkhorn/camel-upstream,royopa/camel,dpocock/camel,sabre1041/camel,askannon/camel,satishgummadelli/camel,duro1/camel,w4tson/camel,kevinearls/camel,CandleCandle/camel,logzio/camel,josefkarasek/camel,lowwool/camel,nicolaferraro/camel,woj-i/camel,Thopap/camel,jollygeorge/camel,ssharma/camel,grange74/camel,mohanaraosv/camel,stravag/camel,mgyongyosi/camel,tlehoux/camel,jmandawg/camel,ge0ffrey/camel,ramonmaruko/camel,lburgazzoli/camel,ge0ffrey/camel,jpav/camel,scranton/camel,RohanHart/camel,hqstevenson/camel,mohanaraosv/camel,jmandawg/camel,sverkera/camel,askannon/camel,eformat/camel,erwelch/camel,YMartsynkevych/camel,bhaveshdt/camel,joakibj/camel,acartapanis/camel,oscerd/camel,dmvolod/camel,tlehoux/camel,NickCis/camel,brreitme/camel,maschmid/camel,chanakaudaya/camel,isavin/camel,skinzer/camel,jarst/camel,igarashitm/camel,iweiss/camel,bfitzpat/camel,tlehoux/camel,sirlatrom/camel,scranton/camel,tdiesler/camel,YoshikiHigo/camel,FingolfinTEK/camel,manuelh9r/camel,mzapletal/camel,joakibj/camel,borcsokj/camel,nboukhed/camel,NickCis/camel,ge0ffrey/camel,DariusX/camel,NetNow/camel,FingolfinTEK/camel,ramonmaruko/camel,snurmine/camel,dvankleef/camel,gilfernandes/camel,dmvolod/camel | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.smpp;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.jsmpp.bean.AlertNotification;
import org.jsmpp.bean.Command;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.OptionalParameter.OctetString;
import org.jsmpp.bean.OptionalParameters;
import org.jsmpp.bean.SubmitSm;
import org.jsmpp.util.AbsoluteTimeFormatter;
import org.jsmpp.util.TimeFormatter;
/**
* A Strategy used to convert between a Camel {@link Exchange} and
 * {@link SmppMessage} to and from an SMPP {@link Command}
*
* @version
*/
public class SmppBinding {
public static final String SEQUENCE_NUMBER = "CamelSmppSequenceNumber";
public static final String SUBMITTED = "CamelSmppSubmitted";
public static final String SUBMIT_DATE = "CamelSmppSubmitDate";
public static final String ERROR = "CamelSmppError";
public static final String DONE_DATE = "CamelSmppDoneDate";
public static final String DELIVERED = "CamelSmppDelivered";
public static final String COMMAND_ID = "CamelSmppCommandId";
public static final String COMMAND_STATUS = "CamelSmppCommandStatus";
public static final String ID = "CamelSmppId";
public static final String REPLACE_IF_PRESENT_FLAG = "CamelSmppReplaceIfPresentFlag";
public static final String VALIDITY_PERIOD = "CamelSmppValidityPeriod";
public static final String SCHEDULE_DELIVERY_TIME = "CamelSmppScheduleDeliveryTime";
public static final String PRIORITY_FLAG = "CamelSmppPriorityFlag";
public static final String PROTOCOL_ID = "CamelSmppProtocolId";
public static final String REGISTERED_DELIVERY = "CamelSmppRegisteredDelivery";
public static final String SERVICE_TYPE = "CamelSmppServiceType";
public static final String SOURCE_ADDR_NPI = "CamelSmppSourceAddrNpi";
public static final String SOURCE_ADDR_TON = "CamelSmppSourceAddrTon";
public static final String SOURCE_ADDR = "CamelSmppSourceAddr";
public static final String DEST_ADDR_NPI = "CamelSmppDestAddrNpi";
public static final String DEST_ADDR_TON = "CamelSmppDestAddrTon";
public static final String DEST_ADDR = "CamelSmppDestAddr";
public static final String ESME_ADDR_NPI = "CamelSmppEsmeAddrNpi";
public static final String ESME_ADDR_TON = "CamelSmppEsmeAddrTon";
public static final String ESME_ADDR = "CamelSmppEsmeAddr";
public static final String FINAL_STATUS = "CamelSmppStatus";
public static final String DATA_CODING = "CamelSmppDataCoding";
public static final String MESSAGE_TYPE = "CamelSmppMessageType";
private static TimeFormatter timeFormatter = new AbsoluteTimeFormatter();
private SmppConfiguration configuration;
public SmppBinding() {
this.configuration = new SmppConfiguration();
}
public SmppBinding(SmppConfiguration configuration) {
this.configuration = configuration;
}
/**
* Create the SubmitSm object from the inbound exchange
*
* @throws UnsupportedEncodingException if the encoding is not supported
*/
public SubmitSm createSubmitSm(Exchange exchange) throws UnsupportedEncodingException {
Message in = exchange.getIn();
String body = exchange.getIn().getBody(String.class);
SubmitSm submitSm = new SubmitSm();
if (body != null) {
byte[] shortMessage = body.getBytes(configuration.getEncoding());
if (shortMessage.length < 255) {
submitSm.setShortMessage(shortMessage);
// To avoid the NPE error
submitSm.setOptionalParametes(new OptionalParameter[]{});
} else {
submitSm.setShortMessage(new byte[0]);
OptionalParameter messagePayloadTLV = OptionalParameters.deserialize(OptionalParameter.Tag.MESSAGE_PAYLOAD.code(), shortMessage);
submitSm.setOptionalParametes(messagePayloadTLV);
}
}
if (in.getHeaders().containsKey(DEST_ADDR)) {
submitSm.setDestAddress(in.getHeader(DEST_ADDR, String.class));
} else {
submitSm.setDestAddress(configuration.getDestAddr());
}
if (in.getHeaders().containsKey(DEST_ADDR_TON)) {
submitSm.setDestAddrTon(in.getHeader(DEST_ADDR_TON, Byte.class));
} else {
submitSm.setDestAddrTon(configuration.getDestAddrTon());
}
if (in.getHeaders().containsKey(DEST_ADDR_NPI)) {
submitSm.setDestAddrNpi(in.getHeader(DEST_ADDR_NPI, Byte.class));
} else {
submitSm.setDestAddrNpi(configuration.getDestAddrNpi());
}
if (in.getHeaders().containsKey(SOURCE_ADDR)) {
submitSm.setSourceAddr(in.getHeader(SOURCE_ADDR, String.class));
} else {
submitSm.setSourceAddr(configuration.getSourceAddr());
}
if (in.getHeaders().containsKey(SOURCE_ADDR_TON)) {
submitSm.setSourceAddrTon(in.getHeader(SOURCE_ADDR_TON, Byte.class));
} else {
submitSm.setSourceAddrTon(configuration.getSourceAddrTon());
}
if (in.getHeaders().containsKey(SOURCE_ADDR_NPI)) {
submitSm.setSourceAddrNpi(in.getHeader(SOURCE_ADDR_NPI, Byte.class));
} else {
submitSm.setSourceAddrNpi(configuration.getSourceAddrNpi());
}
if (in.getHeaders().containsKey(SERVICE_TYPE)) {
submitSm.setServiceType(in.getHeader(SERVICE_TYPE, String.class));
} else {
submitSm.setServiceType(configuration.getServiceType());
}
if (in.getHeaders().containsKey(REGISTERED_DELIVERY)) {
submitSm.setRegisteredDelivery(in.getHeader(REGISTERED_DELIVERY, Byte.class));
} else {
submitSm.setRegisteredDelivery(configuration.getRegisteredDelivery());
}
if (in.getHeaders().containsKey(PROTOCOL_ID)) {
submitSm.setProtocolId(in.getHeader(PROTOCOL_ID, Byte.class));
} else {
submitSm.setProtocolId(configuration.getProtocolId());
}
if (in.getHeaders().containsKey(PRIORITY_FLAG)) {
submitSm.setPriorityFlag(in.getHeader(PRIORITY_FLAG, Byte.class));
} else {
submitSm.setPriorityFlag(configuration.getPriorityFlag());
}
if (in.getHeaders().containsKey(SCHEDULE_DELIVERY_TIME)) {
submitSm.setScheduleDeliveryTime(timeFormatter.format(in.getHeader(SCHEDULE_DELIVERY_TIME, Date.class)));
}
if (in.getHeaders().containsKey(VALIDITY_PERIOD)) {
submitSm.setValidityPeriod(timeFormatter.format(in.getHeader(VALIDITY_PERIOD, Date.class)));
}
if (in.getHeaders().containsKey(REPLACE_IF_PRESENT_FLAG)) {
submitSm.setReplaceIfPresent(in.getHeader(REPLACE_IF_PRESENT_FLAG, Byte.class));
} else {
submitSm.setReplaceIfPresent(configuration.getReplaceIfPresentFlag());
}
if (in.getHeaders().containsKey(DATA_CODING)) {
submitSm.setDataCoding(in.getHeader(DATA_CODING, Byte.class));
} else {
submitSm.setDataCoding(configuration.getDataCoding());
}
return submitSm;
}
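    /**
     * Editor's sketch (hypothetical, not part of the original class): the repeated
     * "header overrides the configured default" pattern above can be expressed once, e.g.
     * {@code submitSm.setServiceType(headerOrDefault(in, SERVICE_TYPE, String.class, configuration.getServiceType()));}
     */
    private <T> T headerOrDefault(final Message in, final String header, final Class<T> type, final T fallback) {
        return in.getHeaders().containsKey(header) ? in.getHeader(header, type) : fallback;
    }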
/**
* Create a new SmppMessage from the inbound alert notification
*/
public SmppMessage createSmppMessage(AlertNotification alertNotification) {
SmppMessage smppMessage = new SmppMessage(alertNotification, configuration);
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.AlertNotification.toString());
smppMessage.setHeader(SEQUENCE_NUMBER, alertNotification.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, alertNotification.getCommandId());
smppMessage.setHeader(COMMAND_STATUS, alertNotification.getCommandStatus());
smppMessage.setHeader(SOURCE_ADDR, alertNotification.getSourceAddr());
smppMessage.setHeader(SOURCE_ADDR_NPI, alertNotification.getSourceAddrNpi());
smppMessage.setHeader(SOURCE_ADDR_TON, alertNotification.getSourceAddrTon());
smppMessage.setHeader(ESME_ADDR, alertNotification.getEsmeAddr());
smppMessage.setHeader(ESME_ADDR_NPI, alertNotification.getEsmeAddrNpi());
smppMessage.setHeader(ESME_ADDR_TON, alertNotification.getEsmeAddrTon());
return smppMessage;
}
/**
* Create a new SmppMessage from the inbound deliver sm or deliver receipt
*/
public SmppMessage createSmppMessage(DeliverSm deliverSm) throws Exception {
SmppMessage smppMessage = new SmppMessage(deliverSm, configuration);
if (deliverSm.isSmscDeliveryReceipt()) {
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DeliveryReceipt.toString());
DeliveryReceipt smscDeliveryReceipt = deliverSm.getShortMessageAsDeliveryReceipt();
smppMessage.setBody(smscDeliveryReceipt.getText());
smppMessage.setHeader(ID, smscDeliveryReceipt.getId());
smppMessage.setHeader(DELIVERED, smscDeliveryReceipt.getDelivered());
smppMessage.setHeader(DONE_DATE, smscDeliveryReceipt.getDoneDate());
if (!"000".equals(smscDeliveryReceipt.getError())) {
smppMessage.setHeader(ERROR, smscDeliveryReceipt.getError());
}
smppMessage.setHeader(SUBMIT_DATE, smscDeliveryReceipt.getSubmitDate());
smppMessage.setHeader(SUBMITTED, smscDeliveryReceipt.getSubmitted());
smppMessage.setHeader(FINAL_STATUS, smscDeliveryReceipt.getFinalStatus());
} else {
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DeliverSm.toString());
if (deliverSm.getShortMessage() != null) {
smppMessage.setBody(String.valueOf(new String(deliverSm.getShortMessage(),
configuration.getEncoding())));
} else if (deliverSm.getOptionalParametes() != null && deliverSm.getOptionalParametes().length > 0) {
List<OptionalParameter> oplist = Arrays.asList(deliverSm.getOptionalParametes());
for (OptionalParameter optPara : oplist) {
if (OptionalParameter.Tag.MESSAGE_PAYLOAD.code() == optPara.tag && OctetString.class.isInstance(optPara)) {
smppMessage.setBody(((OctetString) optPara).getValueAsString());
break;
}
}
}
smppMessage.setHeader(SEQUENCE_NUMBER, deliverSm.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, deliverSm.getCommandId());
smppMessage.setHeader(SOURCE_ADDR, deliverSm.getSourceAddr());
smppMessage.setHeader(DEST_ADDR, deliverSm.getDestAddress());
smppMessage.setHeader(SCHEDULE_DELIVERY_TIME, deliverSm.getScheduleDeliveryTime());
smppMessage.setHeader(VALIDITY_PERIOD, deliverSm.getValidityPeriod());
smppMessage.setHeader(SERVICE_TYPE, deliverSm.getServiceType());
}
return smppMessage;
}
public SmppMessage createSmppMessage(DataSm dataSm, String smppMessageId) {
SmppMessage smppMessage = new SmppMessage(dataSm, configuration);
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DataSm.toString());
smppMessage.setHeader(ID, smppMessageId);
smppMessage.setHeader(SEQUENCE_NUMBER, dataSm.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, dataSm.getCommandId());
smppMessage.setHeader(COMMAND_STATUS, dataSm.getCommandStatus());
smppMessage.setHeader(SOURCE_ADDR, dataSm.getSourceAddr());
smppMessage.setHeader(SOURCE_ADDR_NPI, dataSm.getSourceAddrNpi());
smppMessage.setHeader(SOURCE_ADDR_TON, dataSm.getSourceAddrTon());
smppMessage.setHeader(DEST_ADDR, dataSm.getDestAddress());
smppMessage.setHeader(DEST_ADDR_NPI, dataSm.getDestAddrNpi());
smppMessage.setHeader(DEST_ADDR_TON, dataSm.getDestAddrTon());
smppMessage.setHeader(SERVICE_TYPE, dataSm.getServiceType());
smppMessage.setHeader(REGISTERED_DELIVERY, dataSm.getRegisteredDelivery());
smppMessage.setHeader(DATA_CODING, dataSm.getDataCoding());
return smppMessage;
}
/**
* Returns the current date. Externalized for better test support.
*
* @return the current date
*/
Date getCurrentDate() {
return new Date();
}
/**
* Returns the smpp configuration
*
* @return the configuration
*/
public SmppConfiguration getConfiguration() {
return configuration;
}
/**
* Set the smpp configuration.
*
* @param configuration smppConfiguration
*/
public void setConfiguration(SmppConfiguration configuration) {
this.configuration = configuration;
}
}
| components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppBinding.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.smpp;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.jsmpp.bean.AlertNotification;
import org.jsmpp.bean.Command;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.OptionalParameter.OctetString;
import org.jsmpp.bean.OptionalParameters;
import org.jsmpp.bean.SubmitSm;
import org.jsmpp.util.AbsoluteTimeFormatter;
import org.jsmpp.util.TimeFormatter;
/**
* A Strategy used to convert between a Camel {@link Exchange} and
 * {@link SmppMessage} to and from an SMPP {@link Command}
*
* @version
*/
public class SmppBinding {
public static final String SEQUENCE_NUMBER = "CamelSmppSequenceNumber";
public static final String SUBMITTED = "CamelSmppSubmitted";
public static final String SUBMIT_DATE = "CamelSmppSubmitDate";
public static final String ERROR = "CamelSmppError";
public static final String DONE_DATE = "CamelSmppDoneDate";
public static final String DELIVERED = "CamelSmppDelivered";
public static final String COMMAND_ID = "CamelSmppCommandId";
public static final String COMMAND_STATUS = "CamelSmppCommandStatus";
public static final String ID = "CamelSmppId";
public static final String REPLACE_IF_PRESENT_FLAG = "CamelSmppReplaceIfPresentFlag";
public static final String VALIDITY_PERIOD = "CamelSmppValidityPeriod";
public static final String SCHEDULE_DELIVERY_TIME = "CamelSmppScheduleDeliveryTime";
public static final String PRIORITY_FLAG = "CamelSmppPriorityFlag";
public static final String PROTOCOL_ID = "CamelSmppProtocolId";
public static final String REGISTERED_DELIVERY = "CamelSmppRegisteredDelivery";
public static final String SERVICE_TYPE = "CamelSmppServiceType";
public static final String SOURCE_ADDR_NPI = "CamelSmppSourceAddrNpi";
public static final String SOURCE_ADDR_TON = "CamelSmppSourceAddrTon";
public static final String SOURCE_ADDR = "CamelSmppSourceAddr";
public static final String DEST_ADDR_NPI = "CamelSmppDestAddrNpi";
public static final String DEST_ADDR_TON = "CamelSmppDestAddrTon";
public static final String DEST_ADDR = "CamelSmppDestAddr";
public static final String ESME_ADDR_NPI = "CamelSmppEsmeAddrNpi";
public static final String ESME_ADDR_TON = "CamelSmppEsmeAddrTon";
public static final String ESME_ADDR = "CamelSmppEsmeAddr";
public static final String FINAL_STATUS = "CamelSmppStatus";
public static final String DATA_CODING = "CamelSmppDataCoding";
public static final String MESSAGE_TYPE = "CamelSmppMessageType";
private static TimeFormatter timeFormatter = new AbsoluteTimeFormatter();
private SmppConfiguration configuration;
public SmppBinding() {
this.configuration = new SmppConfiguration();
}
public SmppBinding(SmppConfiguration configuration) {
this.configuration = configuration;
}
/**
* Create the SubmitSm object from the inbound exchange
*
* @throws UnsupportedEncodingException if the encoding is not supported
*/
public SubmitSm createSubmitSm(Exchange exchange) throws UnsupportedEncodingException {
Message in = exchange.getIn();
String body = exchange.getIn().getBody(String.class);
SubmitSm submitSm = new SubmitSm();
if (body != null) {
byte[] shortMessage = body.getBytes(configuration.getEncoding());
if (shortMessage.length < 255) {
submitSm.setShortMessage(shortMessage);
} else {
submitSm.setShortMessage(new byte[0]);
OptionalParameter messagePayloadTLV = OptionalParameters.deserialize(OptionalParameter.Tag.MESSAGE_PAYLOAD.code(), shortMessage);
submitSm.setOptionalParametes(messagePayloadTLV);
}
}
if (in.getHeaders().containsKey(DEST_ADDR)) {
submitSm.setDestAddress(in.getHeader(DEST_ADDR, String.class));
} else {
submitSm.setDestAddress(configuration.getDestAddr());
}
if (in.getHeaders().containsKey(DEST_ADDR_TON)) {
submitSm.setDestAddrTon(in.getHeader(DEST_ADDR_TON, Byte.class));
} else {
submitSm.setDestAddrTon(configuration.getDestAddrTon());
}
if (in.getHeaders().containsKey(DEST_ADDR_NPI)) {
submitSm.setDestAddrNpi(in.getHeader(DEST_ADDR_NPI, Byte.class));
} else {
submitSm.setDestAddrNpi(configuration.getDestAddrNpi());
}
if (in.getHeaders().containsKey(SOURCE_ADDR)) {
submitSm.setSourceAddr(in.getHeader(SOURCE_ADDR, String.class));
} else {
submitSm.setSourceAddr(configuration.getSourceAddr());
}
if (in.getHeaders().containsKey(SOURCE_ADDR_TON)) {
submitSm.setSourceAddrTon(in.getHeader(SOURCE_ADDR_TON, Byte.class));
} else {
submitSm.setSourceAddrTon(configuration.getSourceAddrTon());
}
if (in.getHeaders().containsKey(SOURCE_ADDR_NPI)) {
submitSm.setSourceAddrNpi(in.getHeader(SOURCE_ADDR_NPI, Byte.class));
} else {
submitSm.setSourceAddrNpi(configuration.getSourceAddrNpi());
}
if (in.getHeaders().containsKey(SERVICE_TYPE)) {
submitSm.setServiceType(in.getHeader(SERVICE_TYPE, String.class));
} else {
submitSm.setServiceType(configuration.getServiceType());
}
if (in.getHeaders().containsKey(REGISTERED_DELIVERY)) {
submitSm.setRegisteredDelivery(in.getHeader(REGISTERED_DELIVERY, Byte.class));
} else {
submitSm.setRegisteredDelivery(configuration.getRegisteredDelivery());
}
if (in.getHeaders().containsKey(PROTOCOL_ID)) {
submitSm.setProtocolId(in.getHeader(PROTOCOL_ID, Byte.class));
} else {
submitSm.setProtocolId(configuration.getProtocolId());
}
if (in.getHeaders().containsKey(PRIORITY_FLAG)) {
submitSm.setPriorityFlag(in.getHeader(PRIORITY_FLAG, Byte.class));
} else {
submitSm.setPriorityFlag(configuration.getPriorityFlag());
}
if (in.getHeaders().containsKey(SCHEDULE_DELIVERY_TIME)) {
submitSm.setScheduleDeliveryTime(timeFormatter.format(in.getHeader(SCHEDULE_DELIVERY_TIME, Date.class)));
}
if (in.getHeaders().containsKey(VALIDITY_PERIOD)) {
submitSm.setValidityPeriod(timeFormatter.format(in.getHeader(VALIDITY_PERIOD, Date.class)));
}
if (in.getHeaders().containsKey(REPLACE_IF_PRESENT_FLAG)) {
submitSm.setReplaceIfPresent(in.getHeader(REPLACE_IF_PRESENT_FLAG, Byte.class));
} else {
submitSm.setReplaceIfPresent(configuration.getReplaceIfPresentFlag());
}
if (in.getHeaders().containsKey(DATA_CODING)) {
submitSm.setDataCoding(in.getHeader(DATA_CODING, Byte.class));
} else {
submitSm.setDataCoding(configuration.getDataCoding());
}
return submitSm;
}
/**
* Create a new SmppMessage from the inbound alert notification
*/
public SmppMessage createSmppMessage(AlertNotification alertNotification) {
SmppMessage smppMessage = new SmppMessage(alertNotification, configuration);
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.AlertNotification.toString());
smppMessage.setHeader(SEQUENCE_NUMBER, alertNotification.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, alertNotification.getCommandId());
smppMessage.setHeader(COMMAND_STATUS, alertNotification.getCommandStatus());
smppMessage.setHeader(SOURCE_ADDR, alertNotification.getSourceAddr());
smppMessage.setHeader(SOURCE_ADDR_NPI, alertNotification.getSourceAddrNpi());
smppMessage.setHeader(SOURCE_ADDR_TON, alertNotification.getSourceAddrTon());
smppMessage.setHeader(ESME_ADDR, alertNotification.getEsmeAddr());
smppMessage.setHeader(ESME_ADDR_NPI, alertNotification.getEsmeAddrNpi());
smppMessage.setHeader(ESME_ADDR_TON, alertNotification.getEsmeAddrTon());
return smppMessage;
}
/**
* Create a new SmppMessage from the inbound deliver sm or deliver receipt
*/
public SmppMessage createSmppMessage(DeliverSm deliverSm) throws Exception {
SmppMessage smppMessage = new SmppMessage(deliverSm, configuration);
if (deliverSm.isSmscDeliveryReceipt()) {
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DeliveryReceipt.toString());
DeliveryReceipt smscDeliveryReceipt = deliverSm.getShortMessageAsDeliveryReceipt();
smppMessage.setBody(smscDeliveryReceipt.getText());
smppMessage.setHeader(ID, smscDeliveryReceipt.getId());
smppMessage.setHeader(DELIVERED, smscDeliveryReceipt.getDelivered());
smppMessage.setHeader(DONE_DATE, smscDeliveryReceipt.getDoneDate());
if (!"000".equals(smscDeliveryReceipt.getError())) {
smppMessage.setHeader(ERROR, smscDeliveryReceipt.getError());
}
smppMessage.setHeader(SUBMIT_DATE, smscDeliveryReceipt.getSubmitDate());
smppMessage.setHeader(SUBMITTED, smscDeliveryReceipt.getSubmitted());
smppMessage.setHeader(FINAL_STATUS, smscDeliveryReceipt.getFinalStatus());
} else {
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DeliverSm.toString());
if (deliverSm.getShortMessage() != null) {
smppMessage.setBody(String.valueOf(new String(deliverSm.getShortMessage(),
configuration.getEncoding())));
} else if (deliverSm.getOptionalParametes() != null && deliverSm.getOptionalParametes().length > 0) {
List<OptionalParameter> oplist = Arrays.asList(deliverSm.getOptionalParametes());
for (OptionalParameter optPara : oplist) {
if (OptionalParameter.Tag.MESSAGE_PAYLOAD.code() == optPara.tag && OctetString.class.isInstance(optPara)) {
smppMessage.setBody(((OctetString) optPara).getValueAsString());
break;
}
}
}
smppMessage.setHeader(SEQUENCE_NUMBER, deliverSm.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, deliverSm.getCommandId());
smppMessage.setHeader(SOURCE_ADDR, deliverSm.getSourceAddr());
smppMessage.setHeader(DEST_ADDR, deliverSm.getDestAddress());
smppMessage.setHeader(SCHEDULE_DELIVERY_TIME, deliverSm.getScheduleDeliveryTime());
smppMessage.setHeader(VALIDITY_PERIOD, deliverSm.getValidityPeriod());
smppMessage.setHeader(SERVICE_TYPE, deliverSm.getServiceType());
}
return smppMessage;
}
public SmppMessage createSmppMessage(DataSm dataSm, String smppMessageId) {
SmppMessage smppMessage = new SmppMessage(dataSm, configuration);
smppMessage.setHeader(MESSAGE_TYPE, SmppMessageType.DataSm.toString());
smppMessage.setHeader(ID, smppMessageId);
smppMessage.setHeader(SEQUENCE_NUMBER, dataSm.getSequenceNumber());
smppMessage.setHeader(COMMAND_ID, dataSm.getCommandId());
smppMessage.setHeader(COMMAND_STATUS, dataSm.getCommandStatus());
smppMessage.setHeader(SOURCE_ADDR, dataSm.getSourceAddr());
smppMessage.setHeader(SOURCE_ADDR_NPI, dataSm.getSourceAddrNpi());
smppMessage.setHeader(SOURCE_ADDR_TON, dataSm.getSourceAddrTon());
smppMessage.setHeader(DEST_ADDR, dataSm.getDestAddress());
smppMessage.setHeader(DEST_ADDR_NPI, dataSm.getDestAddrNpi());
smppMessage.setHeader(DEST_ADDR_TON, dataSm.getDestAddrTon());
smppMessage.setHeader(SERVICE_TYPE, dataSm.getServiceType());
smppMessage.setHeader(REGISTERED_DELIVERY, dataSm.getRegisteredDelivery());
smppMessage.setHeader(DATA_CODING, dataSm.getDataCoding());
return smppMessage;
}
/**
* Returns the current date. Externalized for better test support.
*
* @return the current date
*/
Date getCurrentDate() {
return new Date();
}
/**
* Returns the smpp configuration
*
* @return the configuration
*/
public SmppConfiguration getConfiguration() {
return configuration;
}
/**
* Set the smpp configuration.
*
* @param configuration smppConfiguration
*/
public void setConfiguration(SmppConfiguration configuration) {
this.configuration = configuration;
}
} | CAMEL-4422 Fixed the NPE issue of camel-smpp when sending the message length is less than 255
git-svn-id: 11f3c9e1d08a13a4be44fe98a6d63a9c00f6ab23@1165987 13f79535-47bb-0310-9956-ffa450edef68
 | components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppBinding.java | CAMEL-4422 Fixed the NPE issue of camel-smpp when sending the message length is less than 255 | <ide><path>omponents/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppBinding.java
<ide>
<ide> if (shortMessage.length < 255) {
<ide> submitSm.setShortMessage(shortMessage);
<add> // To avoid the NPE error
<add> submitSm.setOptionalParametes(new OptionalParameter[]{});
<ide> } else {
<ide> submitSm.setShortMessage(new byte[0]);
<ide> OptionalParameter messagePayloadTLV = OptionalParameters.deserialize(OptionalParameter.Tag.MESSAGE_PAYLOAD.code(), shortMessage); |
|
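The CAMEL-4422 change above boils down to never leaving the optional-parameter array of the jsmpp SubmitSm null: a short body keeps the short_message field and gets an explicitly empty TLV array, while a long body moves into the MESSAGE_PAYLOAD TLV. A hedged restatement of that guard as a standalone helper (the helper class name is made up; the jsmpp calls are the ones used in the record above):

import org.jsmpp.bean.OptionalParameter;
import org.jsmpp.bean.OptionalParameters;
import org.jsmpp.bean.SubmitSm;

final class SubmitSmBodyHelper {

    private SubmitSmBodyHelper() {
    }

    static void applyBody(final SubmitSm submitSm, final byte[] shortMessage) {
        if (shortMessage.length < 255) {
            submitSm.setShortMessage(shortMessage);
            // empty but non-null, so later reads of the optional parameters cannot NPE
            // (sic: the method name spelling comes from the jsmpp API)
            submitSm.setOptionalParametes(new OptionalParameter[]{});
        } else {
            submitSm.setShortMessage(new byte[0]);
            submitSm.setOptionalParametes(
                    OptionalParameters.deserialize(OptionalParameter.Tag.MESSAGE_PAYLOAD.code(), shortMessage));
        }
    }
}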
Java | apache-2.0 | ee991f5f30f966c368e0bba60218373ecfebc675 | 0 | EagerLogic/Cubee | package com.eagerlogic.cubee.client.styles;
/**
*
* @author dipacs
*/
public final class Color {
public static final Color BLACK = new Color(0xff000000);
public static final Color WHITE = new Color(0xffffffff);
public static final Color TRANSPARENT = new Color(0x00000000);
public static Color getArgbColor(int argb) {
return new Color(argb);
}
public static Color getArgbColor(int alpha, int red, int green, int blue) {
alpha = fixComponent(alpha);
red = fixComponent(red);
green = fixComponent(green);
blue = fixComponent(blue);
return getArgbColor(
alpha << 24
| red << 16
| green << 8
| blue
);
}
public static Color getRgbColor(int rgb) {
return getArgbColor(rgb | 0xff000000);
}
public static Color getRgbColor(int red, int green, int blue) {
return getArgbColor(255, red, green, blue);
}
private static int fixComponent(int component) {
if (component < 0) {
return 0;
}
if (component > 255) {
return 255;
}
return component;
}
public static Color fadeColors(Color startColor, Color endColor, double fadePosition) {
return Color.getArgbColor(
mixComponent(startColor.getAlpha(), endColor.getAlpha(), fadePosition),
mixComponent(startColor.getRed(), endColor.getRed(), fadePosition),
mixComponent(startColor.getGreen(), endColor.getGreen(), fadePosition),
mixComponent(startColor.getBlue(), endColor.getBlue(), fadePosition)
);
}
private static int mixComponent(int startValue, int endValue, double pos) {
int res = (int) (startValue + ((endValue - startValue) * pos));
res = fixComponent(res);
return res;
}
private final int argb;
public Color(int argb) {
this.argb = argb;
}
public int getArgb() {
return argb;
}
public final int getAlpha() {
return (argb >>> 24) & 0xff;
}
public final int getRed() {
return (argb >>> 16) & 0xff;
}
public final int getGreen() {
return (argb >>> 8) & 0xff;
}
public final int getBlue() {
return argb & 0xff;
}
public final Color fade(Color fadeColor, double fadePosition) {
return Color.fadeColors(this, fadeColor, fadePosition);
}
public String toCSS() {
return "rgba(" + getRed() + ", " + getGreen() + ", " + getBlue() + ", " + (getAlpha() / 255.0) + ")";
}
}
| trunk/src/Cubee/src/com/eagerlogic/cubee/client/styles/Color.java | package com.eagerlogic.cubee.client.styles;
/**
*
* @author dipacs
*/
public final class Color {
public static final Color BLACK = new Color(0xff000000);
public static final Color WHITE = new Color(0xffffffff);
public static final Color TRANSPARENT = new Color(0x00000000);
public static Color getArgbColor(int argb) {
return new Color(argb);
}
public static Color getArgbColor(int alpha, int red, int green, int blue) {
alpha = fixComponent(alpha);
red = fixComponent(red);
green = fixComponent(green);
blue = fixComponent(blue);
return getArgbColor(
alpha << 24
| red << 16
| green << 8
| blue
);
}
public static Color getRgbColor(int rgb) {
return getArgbColor(rgb | 0xff000000);
}
public static Color getRgbColor(int red, int green, int blue) {
return getArgbColor(255, red, green, blue);
}
private static int fixComponent(int component) {
if (component < 0) {
return 0;
}
if (component > 255) {
return 255;
}
return component;
}
public static Color fadeColors(Color startColor, Color endColor, double fadePosition) {
return Color.getArgbColor(
mixComponent(startColor.getAlpha(), endColor.getAlpha(), fadePosition),
mixComponent(startColor.getRed(), endColor.getRed(), fadePosition),
mixComponent(startColor.getGreen(), endColor.getGreen(), fadePosition),
mixComponent(startColor.getBlue(), endColor.getBlue(), fadePosition)
);
}
private static int mixComponent(int startValue, int endValue, double pos) {
int res = (int) (startValue + ((endValue - startValue) * pos));
res = fixComponent(res);
return res;
}
private final int argb;
public Color(int argb) {
this.argb = argb;
}
public int getArgb() {
return argb;
}
public final int getAlpha() {
return (argb >>> 24) & 0xff;
}
public final int getRed() {
return (argb >>> 16) & 0xff;
}
public final int getGreen() {
return (argb >>> 8) & 0xff;
}
public final int getBlue() {
return argb & 0xff;
}
public final Color fade(Color fadeColor, double fadePosition) {
return Color.fadeColors(this, fadeColor, fadePosition);
}
public String toCSS() {
return "rgba(" + getRed() + ", " + getGreen() + ", " + getBlue() + ", " + (getAlpha() / 255.0);
}
}
| Fixed missing trailing ")" from Color.toCSS result.
| trunk/src/Cubee/src/com/eagerlogic/cubee/client/styles/Color.java | Fixed missing trailing ")" from Color.toCSS result. | <ide><path>runk/src/Cubee/src/com/eagerlogic/cubee/client/styles/Color.java
<ide> }
<ide>
<ide> public String toCSS() {
<del> return "rgba(" + getRed() + ", " + getGreen() + ", " + getBlue() + ", " + (getAlpha() / 255.0);
<add> return "rgba(" + getRed() + ", " + getGreen() + ", " + getBlue() + ", " + (getAlpha() / 255.0) + ")";
<ide> }
<ide>
<ide> } |
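A small usage sketch for the Color class above, showing the corrected toCSS() output; the Color API and package are the ones from the record, while ColorDemo itself is hypothetical:

import com.eagerlogic.cubee.client.styles.Color;

public final class ColorDemo {

    public static void main(final String[] args) {
        final Color start = Color.getRgbColor(255, 0, 0);    // opaque red
        final Color end = Color.getArgbColor(0x8000ff00);    // half-transparent green
        final Color mid = Color.fadeColors(start, end, 0.5); // component-wise midpoint
        // With the trailing ")" restored this prints roughly "rgba(127, 127, 0, 0.7490196078431373)"
        System.out.println(mid.toCSS());
    }
}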
|
Java | apache-2.0 | b46d415ae3fe192468efb338e67051c97593dccd | 0 | rsudev/c-geo-opensource,pstorch/cgeo,mucek4/cgeo,superspindel/cgeo,kumy/cgeo,samueltardieu/cgeo,cgeo/cgeo,SammysHP/cgeo,Bananeweizen/cgeo,kumy/cgeo,S-Bartfast/cgeo,auricgoldfinger/cgeo,mucek4/cgeo,Bananeweizen/cgeo,S-Bartfast/cgeo,cgeo/cgeo,pstorch/cgeo,superspindel/cgeo,samueltardieu/cgeo,kumy/cgeo,matej116/cgeo,auricgoldfinger/cgeo,matej116/cgeo,matej116/cgeo,SammysHP/cgeo,pstorch/cgeo,SammysHP/cgeo,S-Bartfast/cgeo,Bananeweizen/cgeo,rsudev/c-geo-opensource,auricgoldfinger/cgeo,tobiasge/cgeo,superspindel/cgeo,mucek4/cgeo,samueltardieu/cgeo,tobiasge/cgeo,rsudev/c-geo-opensource,cgeo/cgeo,tobiasge/cgeo,cgeo/cgeo | package cgeo.geocaching;
import cgeo.geocaching.activity.AbstractActivity;
import cgeo.geocaching.activity.AbstractListActivity;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.activity.FilteredActivity;
import cgeo.geocaching.activity.Progress;
import cgeo.geocaching.activity.ShowcaseViewBuilder;
import cgeo.geocaching.apps.cachelist.CacheListApp;
import cgeo.geocaching.apps.cachelist.CacheListAppUtils;
import cgeo.geocaching.apps.cachelist.CacheListApps;
import cgeo.geocaching.apps.cachelist.ListNavigationSelectionActionProvider;
import cgeo.geocaching.apps.navi.NavigationAppFactory;
import cgeo.geocaching.command.DeleteCachesCommand;
import cgeo.geocaching.command.DeleteListCommand;
import cgeo.geocaching.command.MoveToListCommand;
import cgeo.geocaching.command.RenameListCommand;
import cgeo.geocaching.compatibility.Compatibility;
import cgeo.geocaching.connector.gc.RecaptchaHandler;
import cgeo.geocaching.enumerations.CacheListType;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.export.FieldnoteExport;
import cgeo.geocaching.export.GpxExport;
import cgeo.geocaching.files.GPXImporter;
import cgeo.geocaching.filter.FilterActivity;
import cgeo.geocaching.filter.IFilter;
import cgeo.geocaching.list.AbstractList;
import cgeo.geocaching.list.ListNameMemento;
import cgeo.geocaching.list.PseudoList;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.loaders.AbstractSearchLoader;
import cgeo.geocaching.loaders.AbstractSearchLoader.CacheListLoaderType;
import cgeo.geocaching.loaders.CoordsGeocacheListLoader;
import cgeo.geocaching.loaders.FinderGeocacheListLoader;
import cgeo.geocaching.loaders.HistoryGeocacheListLoader;
import cgeo.geocaching.loaders.KeywordGeocacheListLoader;
import cgeo.geocaching.loaders.NextPageGeocacheListLoader;
import cgeo.geocaching.loaders.OfflineGeocacheListLoader;
import cgeo.geocaching.loaders.OwnerGeocacheListLoader;
import cgeo.geocaching.loaders.PocketGeocacheListLoader;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.maps.CGeoMap;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.PocketQueryList;
import cgeo.geocaching.network.Cookies;
import cgeo.geocaching.network.DownloadProgress;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Send2CgeoDownloader;
import cgeo.geocaching.sensors.GeoData;
import cgeo.geocaching.sensors.GeoDirHandler;
import cgeo.geocaching.sensors.Sensors;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.settings.SettingsActivity;
import cgeo.geocaching.sorting.CacheComparator;
import cgeo.geocaching.sorting.SortActionProvider;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.ui.CacheListAdapter;
import cgeo.geocaching.ui.LoggingUI;
import cgeo.geocaching.ui.WeakReferenceHandler;
import cgeo.geocaching.ui.dialog.Dialogs;
import cgeo.geocaching.utils.AngleUtils;
import cgeo.geocaching.utils.CalendarUtils;
import cgeo.geocaching.utils.CancellableHandler;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.AndroidRxUtils;
import com.github.amlcurran.showcaseview.targets.ActionViewTarget;
import com.github.amlcurran.showcaseview.targets.ActionViewTarget.Type;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.provider.OpenableColumns;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBar;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.ListView;
import android.widget.TextView;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import butterknife.ButterKnife;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import rx.Subscription;
import rx.functions.Action0;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
public class CacheListActivity extends AbstractListActivity implements FilteredActivity, LoaderManager.LoaderCallbacks<SearchResult> {
private static final int MAX_LIST_ITEMS = 1000;
private static final int REQUEST_CODE_IMPORT_GPX = 1;
private static final String STATE_FILTER = "currentFilter";
private static final String STATE_INVERSE_SORT = "currentInverseSort";
private static final String STATE_LIST_TYPE = "currentListType";
private static final String STATE_LIST_ID = "currentListId";
    private static final String BUNDLE_ACTION_KEY = "afterLoadAction";
private CacheListType type = null;
private Geopoint coords = null;
private SearchResult search = null;
    /** The list of shown caches, shared with the adapter. Manipulate only on the main thread, e.g. via a Handler */
private final List<Geocache> cacheList = new ArrayList<>();
private CacheListAdapter adapter = null;
private View listFooter = null;
private TextView listFooterText = null;
private final Progress progress = new Progress();
private String title = "";
private int detailTotal = 0;
private final AtomicInteger detailProgress = new AtomicInteger(0);
private long detailProgressTime = 0L;
private int listId = StoredList.TEMPORARY_LIST.id; // Only meaningful for the OFFLINE type
private final GeoDirHandler geoDirHandler = new GeoDirHandler() {
@Override
public void updateDirection(final float direction) {
if (Settings.isLiveList()) {
adapter.setActualHeading(AngleUtils.getDirectionNow(direction));
}
}
@Override
public void updateGeoData(final GeoData geoData) {
adapter.setActualCoordinates(geoData.getCoords());
}
};
private ContextMenuInfo lastMenuInfo;
private String contextMenuGeocode = "";
private Subscription resumeSubscription;
private final ListNameMemento listNameMemento = new ListNameMemento();
// FIXME: This method has mostly been replaced by the loaders. But it still contains a license agreement check.
public void handleCachesLoaded() {
try {
updateAdapter();
updateTitle();
showFooterMoreCaches();
if (search != null && search.getError() == StatusCode.UNAPPROVED_LICENSE) {
showLicenseConfirmationDialog();
} else if (search != null && search.getError() != null) {
showToast(res.getString(R.string.err_download_fail) + ' ' + search.getError().getErrorString(res) + '.');
hideLoading();
showProgress(false);
finish();
return;
}
setAdapterCurrentCoordinates(false);
} catch (final Exception e) {
showToast(res.getString(R.string.err_detail_cache_find_any));
Log.e("CacheListActivity.loadCachesHandler", e);
hideLoading();
showProgress(false);
finish();
return;
}
try {
hideLoading();
showProgress(false);
} catch (final Exception e2) {
Log.e("CacheListActivity.loadCachesHandler.2", e2);
}
adapter.setSelectMode(false);
}
private void showLicenseConfirmationDialog() {
final AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setTitle(res.getString(R.string.license));
dialog.setMessage(res.getString(R.string.err_license));
dialog.setCancelable(true);
dialog.setNegativeButton(res.getString(R.string.license_dismiss), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
Cookies.clearCookies();
dialog.cancel();
}
});
dialog.setPositiveButton(res.getString(R.string.license_show), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
Cookies.clearCookies();
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.geocaching.com/software/agreement.aspx?ID=0")));
}
});
final AlertDialog alert = dialog.create();
alert.show();
}
private final Handler loadCachesHandler = new LoadCachesHandler(this);
private static class LoadCachesHandler extends WeakReferenceHandler<CacheListActivity> {
protected LoadCachesHandler(final CacheListActivity activity) {
super(activity);
}
@Override
public void handleMessage(final Message msg) {
final CacheListActivity activity = getActivity();
if (activity == null) {
return;
}
activity.handleCachesLoaded();
}
}
/**
* Loads the caches and fills the {@link #cacheList} according to {@link #search} content.
*
* If {@link #search} is <code>null</code>, this does nothing.
*/
private void replaceCacheListFromSearch() {
if (search != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
cacheList.clear();
// The database search was moved into the UI call intentionally. If this is done before the runOnUIThread,
// then we have 2 sets of caches in memory. This can lead to OOM for huge cache lists.
final Set<Geocache> cachesFromSearchResult = search.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB);
cacheList.addAll(cachesFromSearchResult);
adapter.reFilter();
updateTitle();
showFooterMoreCaches();
}
});
}
}
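    /**
     * Editor's sketch (hedged, not part of the original commit): the same lazy-load idea as
     * {@link #replaceCacheListFromSearch()} for an arbitrary result — the DataStore read happens
     * inside the UI runnable, so two full cache sets never coexist in memory.
     */
    private void replaceCacheListLazily(final SearchResult newSearch) {
        if (newSearch == null) {
            return;
        }
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                cacheList.clear();
                cacheList.addAll(newSearch.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB));
                adapter.reFilter();
                updateTitle();
                showFooterMoreCaches();
            }
        });
    }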
private static String getCacheNumberString(final Resources res, final int count) {
return res.getQuantityString(R.plurals.cache_counts, count, count);
}
protected void updateTitle() {
setTitle(title);
getSupportActionBar().setSubtitle(getCurrentSubtitle());
refreshSpinnerAdapter();
}
private class LoadDetailsHandler extends CancellableHandler {
@Override
public void handleRegularMessage(final Message msg) {
updateAdapter();
if (msg.what == DownloadProgress.MSG_LOADED) {
((Geocache) msg.obj).setStatusChecked(false);
adapter.notifyDataSetChanged();
final int dp = detailProgress.get();
final int secondsElapsed = (int) ((System.currentTimeMillis() - detailProgressTime) / 1000);
final int minutesRemaining = ((detailTotal - dp) * secondsElapsed / ((dp > 0) ? dp : 1) / 60);
progress.setProgress(dp);
if (minutesRemaining < 1) {
progress.setMessage(res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm));
} else {
progress.setMessage(res.getString(R.string.caches_downloading) + " " + res.getQuantityString(R.plurals.caches_eta_mins, minutesRemaining, minutesRemaining));
}
} else {
new AsyncTask<Void, Void, Set<Geocache>>() {
@Override
protected Set<Geocache> doInBackground(final Void... params) {
return search != null ? search.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB) : null;
}
@Override
protected void onPostExecute(final Set<Geocache> result) {
if (CollectionUtils.isNotEmpty(result)) {
cacheList.clear();
cacheList.addAll(result);
adapter.reFilter();
}
setAdapterCurrentCoordinates(false);
showProgress(false);
progress.dismiss();
}
}.execute();
}
}
}
/**
* TODO Possibly parts should be a Thread not a Handler
*/
private class DownloadFromWebHandler extends CancellableHandler {
@Override
public void handleRegularMessage(final Message msg) {
updateAdapter();
adapter.notifyDataSetChanged();
switch (msg.what) {
case DownloadProgress.MSG_WAITING: //no caches
progress.setMessage(res.getString(R.string.web_import_waiting));
break;
case DownloadProgress.MSG_LOADING: //cache downloading
progress.setMessage(res.getString(R.string.web_downloading) + " " + msg.obj + '…');
break;
case DownloadProgress.MSG_LOADED: //Cache downloaded
progress.setMessage(res.getString(R.string.web_downloaded) + " " + msg.obj + '…');
refreshCurrentList();
break;
case DownloadProgress.MSG_SERVER_FAIL:
progress.dismiss();
showToast(res.getString(R.string.sendToCgeo_download_fail));
finish();
break;
case DownloadProgress.MSG_NO_REGISTRATION:
progress.dismiss();
showToast(res.getString(R.string.sendToCgeo_no_registration));
finish();
break;
default: // MSG_DONE
adapter.setSelectMode(false);
replaceCacheListFromSearch();
progress.dismiss();
break;
}
}
}
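// Invoked after offline logs have been cleared: leaves select mode and refreshes the list.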
private final CancellableHandler clearOfflineLogsHandler = new CancellableHandler() {
@Override
public void handleRegularMessage(final Message msg) {
adapter.setSelectMode(false);
refreshCurrentList();
replaceCacheListFromSearch();
progress.dismiss();
}
};
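// Refreshes the current list once a GPX attachment import has finished.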
private final Handler importGpxAttachementFinishedHandler = new Handler() {
@Override
public void handleMessage(final Message msg) {
refreshCurrentList();
}
};
private AbstractSearchLoader currentLoader;
public CacheListActivity() {
super(true);
}
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setTheme();
setContentView(R.layout.cacheslist_activity);
// get parameters
Bundle extras = getIntent().getExtras();
if (extras != null) {
type = Intents.getListType(getIntent());
coords = extras.getParcelable(Intents.EXTRA_COORDS);
} else {
extras = new Bundle();
}
if (isInvokedFromAttachment()) {
type = CacheListType.OFFLINE;
if (coords == null) {
coords = Geopoint.ZERO;
}
}
if (type == CacheListType.NEAREST) {
coords = Sensors.getInstance().currentGeo().getCoords();
}
setTitle(title);
// Check whether we're recreating a previously destroyed instance
if (savedInstanceState != null) {
// Restore value of members from saved state
currentFilter = savedInstanceState.getParcelable(STATE_FILTER);
currentInverseSort = savedInstanceState.getBoolean(STATE_INVERSE_SORT);
type = CacheListType.values()[savedInstanceState.getInt(STATE_LIST_TYPE, type.ordinal())];
listId = savedInstanceState.getInt(STATE_LIST_ID);
}
initAdapter();
prepareFilterBar();
if (type.canSwitch) {
initActionBarSpinner();
}
currentLoader = (AbstractSearchLoader) getSupportLoaderManager().initLoader(type.getLoaderId(), extras, this);
// init
if (CollectionUtils.isNotEmpty(cacheList)) {
// currentLoader can be null if this activity is created from a map, as onCreateLoader() will return null.
if (currentLoader != null && currentLoader.isStarted()) {
showFooterLoadingCaches();
} else {
showFooterMoreCaches();
}
}
if (isInvokedFromAttachment()) {
importGpxAttachement();
} else {
presentShowcase();
}
}
@Override
public void onSaveInstanceState(final Bundle savedInstanceState) {
// Always call the superclass so it can save the view hierarchy state
super.onSaveInstanceState(savedInstanceState);
// Save the current Filter
savedInstanceState.putParcelable(STATE_FILTER, currentFilter);
savedInstanceState.putBoolean(STATE_INVERSE_SORT, adapter.getInverseSort());
savedInstanceState.putInt(STATE_LIST_TYPE, type.ordinal());
savedInstanceState.putInt(STATE_LIST_ID, listId);
}
/**
* Action bar spinner adapter. {@code null} for list types that don't allow switching (search results, ...).
*/
CacheListSpinnerAdapter mCacheListSpinnerAdapter;
/**
* remember current filter when switching between lists, so it can be re-applied afterwards
*/
private IFilter currentFilter = null;
private boolean currentInverseSort = false;
private SortActionProvider sortProvider;
private void initActionBarSpinner() {
mCacheListSpinnerAdapter = new CacheListSpinnerAdapter(this, R.layout.support_simple_spinner_dropdown_item);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
getSupportActionBar().setDisplayShowTitleEnabled(false);
getSupportActionBar().setListNavigationCallbacks(mCacheListSpinnerAdapter, new ActionBar.OnNavigationListener() {
@Override
public boolean onNavigationItemSelected(final int i, final long l) {
final int newListId = mCacheListSpinnerAdapter.getItem(i).id;
if (newListId != listId) {
switchListById(newListId);
}
return true;
}
});
}
private void refreshSpinnerAdapter() {
/* If the activity does not use the Spinner this will be null */
if (mCacheListSpinnerAdapter == null) {
return;
}
mCacheListSpinnerAdapter.clear();
final AbstractList list = AbstractList.getListById(listId);
for (final AbstractList l : StoredList.UserInterface.getMenuLists(false, PseudoList.NEW_LIST.id)) {
mCacheListSpinnerAdapter.add(l);
}
getSupportActionBar().setSelectedNavigationItem(mCacheListSpinnerAdapter.getPosition(list));
}
@Override
public void onConfigurationChanged(final Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (currentLoader != null && currentLoader.isLoading()) {
showFooterLoadingCaches();
}
}
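/**
* @return whether the current list is a real stored list (standard or user created), as opposed to a pseudo list or search result
*/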
private boolean isConcreteList() {
return type == CacheListType.OFFLINE &&
(listId == StoredList.STANDARD_LIST_ID || listId >= DataStore.customListIdOffset);
}
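// true if this activity was started to view a GPX attachment (ACTION_VIEW intent with a data URI)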
private boolean isInvokedFromAttachment() {
final Intent intent = getIntent();
return Intent.ACTION_VIEW.equals(intent.getAction()) && intent.getData() != null;
}
private void importGpxAttachement() {
new StoredList.UserInterface(this).promptForListSelection(R.string.gpx_import_select_list_title, new Action1<Integer>() {
@Override
public void call(final Integer listId) {
new GPXImporter(CacheListActivity.this, listId, importGpxAttachementFinishedHandler).importGPX();
switchListById(listId);
}
}, true, 0);
}
@Override
public void onResume() {
super.onResume();
resumeSubscription = geoDirHandler.start(GeoDirHandler.UPDATE_GEODATA | GeoDirHandler.UPDATE_DIRECTION | GeoDirHandler.LOW_POWER, 250, TimeUnit.MILLISECONDS);
adapter.setSelectMode(false);
setAdapterCurrentCoordinates(true);
if (search != null) {
replaceCacheListFromSearch();
loadCachesHandler.sendEmptyMessage(0);
}
// refresh standard list if it has changed (new caches downloaded)
if (type == CacheListType.OFFLINE && (listId >= StoredList.STANDARD_LIST_ID || listId == PseudoList.ALL_LIST.id) && search != null) {
final SearchResult newSearch = DataStore.getBatchOfStoredCaches(coords, Settings.getCacheType(), listId);
if (newSearch.getTotalCountGC() != search.getTotalCountGC()) {
refreshCurrentList();
}
}
}
private void setAdapterCurrentCoordinates(final boolean forceSort) {
adapter.setActualCoordinates(Sensors.getInstance().currentGeo().getCoords());
if (forceSort) {
adapter.forceSort();
}
}
@Override
public void onPause() {
resumeSubscription.unsubscribe();
super.onPause();
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.cache_list_options, menu);
sortProvider = (SortActionProvider) MenuItemCompat.getActionProvider(menu.findItem(R.id.menu_sort));
assert sortProvider != null; // We set it in the XML file
sortProvider.setSelection(adapter.getCacheComparator());
sortProvider.setIsEventsOnly(adapter.isEventsOnly());
sortProvider.setClickListener(new Action1<CacheComparator>() {
@Override
public void call(final CacheComparator selectedComparator) {
final CacheComparator oldComparator = adapter.getCacheComparator();
// selecting the same sorting twice will toggle the order
if (selectedComparator != null && oldComparator != null && selectedComparator.getClass().equals(oldComparator.getClass())) {
adapter.toggleInverseSort();
} else {
// always reset the inversion for a new sorting criteria
adapter.resetInverseSort();
}
setComparator(selectedComparator);
sortProvider.setSelection(selectedComparator);
}
});
ListNavigationSelectionActionProvider.initialize(menu.findItem(R.id.menu_cache_list_app_provider), new ListNavigationSelectionActionProvider.Callback() {
@Override
public void onListNavigationSelected(final CacheListApp app) {
app.invoke(CacheListAppUtils.filterCoords(cacheList), CacheListActivity.this, getFilteredSearch());
}
});
return true;
}
private static void setVisible(final Menu menu, final int itemId, final boolean visible) {
menu.findItem(itemId).setVisible(visible);
}
@Override
public boolean onPrepareOptionsMenu(final Menu menu) {
super.onPrepareOptionsMenu(menu);
final boolean isHistory = type == CacheListType.HISTORY;
final boolean isOffline = type == CacheListType.OFFLINE;
final boolean isEmpty = cacheList.isEmpty();
final boolean isConcrete = isConcreteList();
try {
if (adapter.isSelectMode()) {
menu.findItem(R.id.menu_switch_select_mode).setTitle(res.getString(R.string.caches_select_mode_exit))
.setIcon(R.drawable.ic_menu_clear_playlist);
} else {
menu.findItem(R.id.menu_switch_select_mode).setTitle(res.getString(R.string.caches_select_mode))
.setIcon(R.drawable.ic_menu_agenda);
}
menu.findItem(R.id.menu_invert_selection).setVisible(adapter.isSelectMode());
setVisible(menu, R.id.menu_show_on_map, !isEmpty);
setVisible(menu, R.id.menu_filter, search != null && search.getCount() > 0);
setVisible(menu, R.id.menu_switch_select_mode, !isEmpty);
setVisible(menu, R.id.menu_create_list, isOffline);
setVisible(menu, R.id.menu_sort, !isEmpty && !isHistory);
setVisible(menu, R.id.menu_refresh_stored, !isEmpty && (isConcrete || type != CacheListType.OFFLINE));
setVisible(menu, R.id.menu_drop_caches, !isEmpty && isOffline);
setVisible(menu, R.id.menu_delete_events, isConcrete && !isEmpty && containsPastEvents());
setVisible(menu, R.id.menu_move_to_list, isOffline && !isEmpty);
setVisible(menu, R.id.menu_remove_from_history, !isEmpty && isHistory);
setVisible(menu, R.id.menu_clear_offline_logs, !isEmpty && (isHistory || isOffline) && containsOfflineLogs());
setVisible(menu, R.id.menu_import, isOffline);
setVisible(menu, R.id.menu_import_web, isOffline);
setVisible(menu, R.id.menu_import_gpx, isOffline);
setVisible(menu, R.id.menu_export, !isEmpty);
if (!isOffline && !isHistory) {
menu.findItem(R.id.menu_refresh_stored).setTitle(R.string.caches_store_offline);
}
final boolean isNonDefaultList = isConcrete && listId != StoredList.STANDARD_LIST_ID;
if (isOffline || type == CacheListType.HISTORY) { // offline and history lists
setMenuItemLabel(menu, R.id.menu_drop_caches, R.string.caches_remove_selected, R.string.caches_remove_all);
setMenuItemLabel(menu, R.id.menu_refresh_stored, R.string.caches_refresh_selected, R.string.caches_refresh_all);
setMenuItemLabel(menu, R.id.menu_move_to_list, R.string.caches_move_selected, R.string.caches_move_all);
} else { // search and global list (all other than offline and history)
setMenuItemLabel(menu, R.id.menu_refresh_stored, R.string.caches_store_selected, R.string.caches_store_offline);
}
menu.findItem(R.id.menu_drop_list).setVisible(isNonDefaultList);
menu.findItem(R.id.menu_rename_list).setVisible(isNonDefaultList);
setMenuItemLabel(menu, R.id.menu_remove_from_history, R.string.cache_remove_from_history, R.string.cache_clear_history);
menu.findItem(R.id.menu_import_android).setVisible(Compatibility.isStorageAccessFrameworkAvailable() && isOffline);
final List<CacheListApp> listNavigationApps = CacheListApps.getActiveApps();
menu.findItem(R.id.menu_cache_list_app_provider).setVisible(!isEmpty && listNavigationApps.size() > 1);
menu.findItem(R.id.menu_cache_list_app).setVisible(!isEmpty && listNavigationApps.size() == 1);
} catch (final RuntimeException e) {
Log.e("CacheListActivity.onPrepareOptionsMenu", e);
}
return true;
}
private boolean containsPastEvents() {
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (CalendarUtils.isPastEvent(cache)) {
return true;
}
}
return false;
}
private boolean containsOfflineLogs() {
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (cache.isLogOffline()) {
return true;
}
}
return false;
}
private void setMenuItemLabel(final Menu menu, final int menuId, final int resIdSelection, final int resId) {
final MenuItem menuItem = menu.findItem(menuId);
if (menuItem == null) {
return;
}
final boolean hasSelection = adapter != null && adapter.getCheckedCount() > 0;
if (hasSelection) {
menuItem.setTitle(res.getString(resIdSelection) + " (" + adapter.getCheckedCount() + ")");
} else {
menuItem.setTitle(res.getString(resId));
}
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_show_on_map:
goMap();
return true;
case R.id.menu_switch_select_mode:
adapter.switchSelectMode();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_refresh_stored:
refreshStored(adapter.getCheckedOrAllCaches());
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_drop_caches:
deleteCachesWithConfirmation();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_import_gpx:
importGpx();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_import_android:
importGpxFromAndroid();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_create_list:
new StoredList.UserInterface(this).promptForListCreation(getListSwitchingRunnable(), listNameMemento.getTerm());
refreshSpinnerAdapter();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_drop_list:
removeList();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_rename_list:
renameList();
return true;
case R.id.menu_invert_selection:
adapter.invertSelection();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_filter:
showFilterMenu(null);
return true;
case R.id.menu_import_web:
importWeb();
return true;
case R.id.menu_export_gpx:
new GpxExport().export(adapter.getCheckedOrAllCaches(), this);
return true;
case R.id.menu_export_fieldnotes:
new FieldnoteExport().export(adapter.getCheckedOrAllCaches(), this);
return true;
case R.id.menu_remove_from_history:
removeFromHistoryCheck();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_move_to_list:
moveCachesToOtherList(adapter.getCheckedOrAllCaches());
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_delete_events:
deletePastEvents();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_clear_offline_logs:
clearOfflineLogs();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_cache_list_app:
if (cacheToShow()) {
CacheListApps.getActiveApps().get(0).invoke(CacheListAppUtils.filterCoords(cacheList), this, getFilteredSearch());
}
return true;
}
return super.onOptionsItemSelected(item);
}
private void checkIfEmptyAndRemoveAfterConfirm() {
final boolean isNonDefaultList = isConcreteList() && listId != StoredList.STANDARD_LIST_ID;
// Check local cacheList first, and Datastore only if needed (because of filtered lists)
// Checking is done in this order for performance reasons
if (isNonDefaultList && CollectionUtils.isEmpty(cacheList)
&& DataStore.getAllStoredCachesCount(CacheType.ALL, listId) == 0) {
// ask the user whether to delete the now empty list
Dialogs.confirmYesNo(this, R.string.list_dialog_remove_title, R.string.list_dialog_remove_nowempty, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int whichButton) {
removeListInternal();
}
});
}
}
private boolean cacheToShow() {
if (search == null || CollectionUtils.isEmpty(cacheList)) {
showToast(res.getString(R.string.warn_no_cache_coord));
return false;
}
return true;
}
private SearchResult getFilteredSearch() {
return new SearchResult(Geocache.getGeocodes(adapter.getFilteredList()));
}
private void deletePastEvents() {
final List<Geocache> deletion = new ArrayList<>();
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (CalendarUtils.isPastEvent(cache)) {
deletion.add(cache);
}
}
deleteCachesInternal(deletion);
}
private void clearOfflineLogs() {
Dialogs.confirmYesNo(this, R.string.caches_clear_offlinelogs, R.string.caches_clear_offlinelogs_message, new OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
progress.show(CacheListActivity.this, null, res.getString(R.string.caches_clear_offlinelogs_progress), true, clearOfflineLogsHandler.cancelMessage());
clearOfflineLogs(clearOfflineLogsHandler, adapter.getCheckedOrAllCaches());
}
});
}
/**
* called from the filter bar view
*/
@Override
public void showFilterMenu(final View view) {
FilterActivity.selectFilter(this);
}
private void setComparator(final CacheComparator comparator) {
adapter.setComparator(comparator);
currentInverseSort = adapter.getInverseSort();
}
@Override
public void onCreateContextMenu(final ContextMenu menu, final View view, final ContextMenu.ContextMenuInfo info) {
super.onCreateContextMenu(menu, view, info);
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (final Exception e) {
Log.w("CacheListActivity.onCreateContextMenu", e);
}
if (adapterInfo == null || adapterInfo.position >= adapter.getCount()) {
return;
}
final Geocache cache = adapter.getItem(adapterInfo.position);
menu.setHeaderTitle(StringUtils.defaultIfBlank(cache.getName(), cache.getGeocode()));
contextMenuGeocode = cache.getGeocode();
getMenuInflater().inflate(R.menu.cache_list_context, menu);
menu.findItem(R.id.menu_default_navigation).setTitle(NavigationAppFactory.getDefaultNavigationApplication().getName());
final boolean hasCoords = cache.getCoords() != null;
menu.findItem(R.id.menu_default_navigation).setVisible(hasCoords);
menu.findItem(R.id.menu_navigate).setVisible(hasCoords);
menu.findItem(R.id.menu_cache_details).setVisible(hasCoords);
final boolean isOffline = cache.isOffline();
menu.findItem(R.id.menu_drop_cache).setVisible(isOffline);
menu.findItem(R.id.menu_move_to_list).setVisible(isOffline);
menu.findItem(R.id.menu_refresh).setVisible(isOffline);
menu.findItem(R.id.menu_store_cache).setVisible(!isOffline);
LoggingUI.onPrepareOptionsMenu(menu, cache, adapterInfo.targetView);
}
private void moveCachesToOtherList(final Collection<Geocache> caches) {
new MoveToListCommand(this, caches, listId) {
@Override
protected void onFinished() {
adapter.setSelectMode(false);
refreshCurrentList(AfterLoadAction.CHECK_IF_EMPTY);
}
}.execute();
}
@Override
public boolean onContextItemSelected(final MenuItem item) {
ContextMenu.ContextMenuInfo info = item.getMenuInfo();
// restore menu info for sub menu items, see
// https://code.google.com/p/android/issues/detail?id=7139
if (info == null) {
info = lastMenuInfo;
lastMenuInfo = null;
}
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (final Exception e) {
Log.w("CacheListActivity.onContextItemSelected", e);
}
final Geocache cache = adapterInfo != null ? getCacheFromAdapter(adapterInfo) : null;
// just in case the list got resorted while we are executing this code
if (cache == null) {
return true;
}
switch (item.getItemId()) {
case R.id.menu_default_navigation:
NavigationAppFactory.startDefaultNavigationApplication(1, this, cache);
break;
case R.id.menu_navigate:
NavigationAppFactory.showNavigationMenu(this, cache, null, null);
break;
case R.id.menu_cache_details:
CacheDetailActivity.startActivity(this, cache.getGeocode(), cache.getName());
break;
case R.id.menu_drop_cache:
deleteCachesInternal(Collections.singletonList(cache));
break;
case R.id.menu_move_to_list:
moveCachesToOtherList(Collections.singletonList(cache));
break;
case R.id.menu_store_cache:
case R.id.menu_refresh:
refreshStored(Collections.singletonList(cache));
break;
default:
// we must remember the menu info for the sub menu because of an Android bug:
// https://code.google.com/p/android/issues/detail?id=7139
lastMenuInfo = info;
LoggingUI.onMenuItemSelected(item, this, cache);
}
return true;
}
/**
* Extract a cache from adapter data.
*
* @param adapterInfo
* an adapterInfo
* @return the pointed cache
*/
private Geocache getCacheFromAdapter(final AdapterContextMenuInfo adapterInfo) {
final Geocache cache = adapter.getItem(adapterInfo.position);
if (cache.getGeocode().equalsIgnoreCase(contextMenuGeocode)) {
return cache;
}
return adapter.findCacheByGeocode(contextMenuGeocode);
}
private boolean setFilter(final IFilter filter) {
currentFilter = filter;
adapter.setFilter(filter);
prepareFilterBar();
updateTitle();
invalidateOptionsMenuCompatible();
return true;
}
@Override
public boolean onKeyDown(final int keyCode, final KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (adapter.isSelectMode()) {
adapter.setSelectMode(false);
return true;
}
}
return super.onKeyDown(keyCode, event);
}
private void initAdapter() {
final ListView listView = getListView();
registerForContextMenu(listView);
adapter = new CacheListAdapter(this, cacheList, type);
adapter.setFilter(currentFilter);
if (listFooter == null) {
listFooter = getLayoutInflater().inflate(R.layout.cacheslist_footer, listView, false);
listFooter.setClickable(true);
listFooter.setOnClickListener(new MoreCachesListener());
listFooterText = ButterKnife.findById(listFooter, R.id.more_caches);
listView.addFooterView(listFooter);
}
setListAdapter(adapter);
adapter.setInverseSort(currentInverseSort);
adapter.forceSort();
}
private void updateAdapter() {
adapter.notifyDataSetChanged();
adapter.reFilter();
adapter.checkEvents();
adapter.forceSort();
}
private void showFooterLoadingCaches() {
// no footer for offline lists
if (listFooter == null) {
return;
}
listFooterText.setText(res.getString(R.string.caches_more_caches_loading));
listFooter.setClickable(false);
listFooter.setOnClickListener(null);
}
private void showFooterMoreCaches() {
// no footer in offline lists
if (listFooter == null) {
return;
}
boolean enableMore = type != CacheListType.OFFLINE && cacheList.size() < MAX_LIST_ITEMS;
if (enableMore && search != null) {
final int count = search.getTotalCountGC();
enableMore = count > 0 && cacheList.size() < count;
}
listFooter.setClickable(enableMore);
if (enableMore) {
listFooterText.setText(res.getString(R.string.caches_more_caches) + " (" + res.getString(R.string.caches_more_caches_currently) + ": " + cacheList.size() + ")");
listFooter.setOnClickListener(new MoreCachesListener());
} else if (type != CacheListType.OFFLINE) {
listFooterText.setText(res.getString(CollectionUtils.isEmpty(cacheList) ? R.string.caches_no_cache : R.string.caches_more_caches_no));
listFooter.setOnClickListener(null);
} else {
// hiding footer for offline list is not possible, it must be removed instead
// http://stackoverflow.com/questions/7576099/hiding-footer-in-listview
getListView().removeFooterView(listFooter);
}
}
private void importGpx() {
GpxFileListActivity.startSubActivity(this, listId);
}
private void importGpxFromAndroid() {
Compatibility.importGpxFromStorageAccessFramework(this, REQUEST_CODE_IMPORT_GPX);
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_IMPORT_GPX && resultCode == Activity.RESULT_OK) {
// The document selected by the user won't be returned in the intent.
// Instead, a URI to that document will be contained in the return intent
// provided to this method as a parameter. Pull that URI using "data.getData()".
if (data != null) {
final Uri uri = data.getData();
new GPXImporter(this, listId, importGpxAttachementFinishedHandler).importGPX(uri, null, getDisplayName(uri));
}
} else if (requestCode == FilterActivity.REQUEST_SELECT_FILTER && resultCode == Activity.RESULT_OK) {
final int[] filterIndex = data.getIntArrayExtra(FilterActivity.EXTRA_FILTER_RESULT);
setFilter(FilterActivity.getFilterFromPosition(filterIndex[0], filterIndex[1]));
}
if (type == CacheListType.OFFLINE) {
refreshCurrentList();
}
}
private String getDisplayName(final Uri uri) {
Cursor cursor = null;
try {
cursor = getContentResolver().query(uri, new String[] { OpenableColumns.DISPLAY_NAME }, null, null, null);
if (cursor != null && cursor.moveToFirst()) {
return cursor.getString(cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME));
}
} finally {
if (cursor != null) {
cursor.close();
}
}
return null;
}
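/**
* Refresh the given caches online and store them offline. If the corresponding setting is enabled and this is
* not an offline or history list, the user is asked for the target list first.
*/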
public void refreshStored(final List<Geocache> caches) {
detailTotal = caches.size();
if (detailTotal == 0) {
return;
}
if (!Network.isNetworkConnected()) {
showToast(getString(R.string.err_server));
return;
}
if (Settings.getChooseList() && (type != CacheListType.OFFLINE && type != CacheListType.HISTORY)) {
// let user select list to store cache in
new StoredList.UserInterface(this).promptForListSelection(R.string.list_title,
new Action1<Integer>() {
@Override
public void call(final Integer selectedListId) {
// in case of online lists, set the list id to a concrete list now
for (final Geocache geocache : caches) {
geocache.setListId(selectedListId);
}
refreshStoredInternal(caches);
}
}, true, StoredList.TEMPORARY_LIST.id, listNameMemento);
} else {
if (type != CacheListType.OFFLINE) {
for (final Geocache geocache : caches) {
if (geocache.getListId() == StoredList.TEMPORARY_LIST.id) {
geocache.setListId(StoredList.STANDARD_LIST_ID);
}
}
}
refreshStoredInternal(caches);
}
}
private void refreshStoredInternal(final List<Geocache> caches) {
detailProgress.set(0);
showProgress(false);
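// rough ETA in minutes, assuming roughly 25 seconds per cache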
final int etaTime = ((detailTotal * 25) / 60);
final String message;
if (etaTime < 1) {
message = res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm);
} else {
message = res.getString(R.string.caches_downloading) + " " + res.getQuantityString(R.plurals.caches_eta_mins, etaTime, etaTime);
}
final LoadDetailsHandler loadDetailsHandler = new LoadDetailsHandler();
progress.show(this, null, message, ProgressDialog.STYLE_HORIZONTAL, loadDetailsHandler.cancelMessage());
progress.setMaxProgressAndReset(detailTotal);
detailProgressTime = System.currentTimeMillis();
loadDetails(loadDetailsHandler, caches);
}
public void removeFromHistoryCheck() {
final int message = (adapter != null && adapter.getCheckedCount() > 0) ? R.string.cache_remove_from_history
: R.string.cache_clear_history;
Dialogs.confirmYesNo(this, R.string.caches_removing_from_history, message, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
removeFromHistory();
dialog.cancel();
}
});
}
private void removeFromHistory() {
final List<Geocache> caches = adapter.getCheckedOrAllCaches();
final String[] geocodes = new String[caches.size()];
for (int i = 0; i < geocodes.length; i++) {
geocodes[i] = caches.get(i).getGeocode();
}
DataStore.clearVisitDate(geocodes);
refreshCurrentList();
}
private void importWeb() {
// menu is also shown with no device connected
if (!Settings.isRegisteredForSend2cgeo()) {
Dialogs.confirm(this, R.string.web_import_title, R.string.init_sendToCgeo_description, new OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
SettingsActivity.openForScreen(R.string.preference_screen_sendtocgeo, CacheListActivity.this);
}
});
return;
}
detailProgress.set(0);
showProgress(false);
final DownloadFromWebHandler downloadFromWebHandler = new DownloadFromWebHandler();
progress.show(this, null, res.getString(R.string.web_import_waiting), true, downloadFromWebHandler.cancelMessage());
Send2CgeoDownloader.loadFromWeb(downloadFromWebHandler, listId);
}
private void deleteCachesWithConfirmation() {
final int titleId = (adapter.getCheckedCount() > 0) ? R.string.caches_remove_selected : R.string.caches_remove_all;
final int count = adapter.getCheckedOrAllCount();
final String message = res.getQuantityString(adapter.getCheckedCount() > 0 ? R.plurals.caches_remove_selected_confirm : R.plurals.caches_remove_all_confirm, count, count);
Dialogs.confirmYesNo(this, titleId, message, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
deleteCachesInternal(adapter.getCheckedOrAllCaches());
dialog.cancel();
}
});
}
private void deleteCachesInternal(final @NonNull Collection<Geocache> caches) {
new DeleteCachesFromListCommand(this, caches).execute();
}
/**
* Asynchronously refresh the details of the given caches.
*/
private void loadDetails(final CancellableHandler handler, final List<Geocache> caches) {
final Observable<Geocache> allCaches;
if (Settings.isStoreOfflineMaps()) {
final List<Geocache> withStaticMaps = new ArrayList<>(caches.size());
final List<Geocache> withoutStaticMaps = new ArrayList<>(caches.size());
for (final Geocache cache : caches) {
if (cache.hasStaticMap()) {
withStaticMaps.add(cache);
} else {
withoutStaticMaps.add(cache);
}
}
allCaches = Observable.concat(Observable.from(withoutStaticMaps), Observable.from(withStaticMaps));
} else {
allCaches = Observable.from(caches);
}
final Observable<Geocache> loaded = allCaches.flatMap(new Func1<Geocache, Observable<Geocache>>() {
@Override
public Observable<Geocache> call(final Geocache cache) {
return Observable.create(new OnSubscribe<Geocache>() {
@Override
public void call(final Subscriber<? super Geocache> subscriber) {
cache.refreshSynchronous(null);
detailProgress.incrementAndGet();
handler.obtainMessage(DownloadProgress.MSG_LOADED, cache).sendToTarget();
subscriber.onCompleted();
}
}).subscribeOn(AndroidRxUtils.refreshScheduler);
}
}).doOnCompleted(new Action0() {
@Override
public void call() {
handler.sendEmptyMessage(DownloadProgress.MSG_DONE);
}
});
handler.unsubscribeIfCancelled(loaded.subscribe());
}
private static final class DeleteCachesFromListCommand extends DeleteCachesCommand {
private final WeakReference<CacheListActivity> activityRef;
private final int lastListPosition;
public DeleteCachesFromListCommand(@NonNull final CacheListActivity context, final Collection<Geocache> caches) {
super(context, caches);
lastListPosition = context.getListView().getFirstVisiblePosition();
activityRef = new WeakReference<>(context);
}
@Override
public void onFinished() {
final CacheListActivity activity = activityRef.get();
if (activity != null) {
activity.adapter.setSelectMode(false);
activity.refreshCurrentList(AfterLoadAction.CHECK_IF_EMPTY);
activity.replaceCacheListFromSearch();
activity.getListView().setSelection(lastListPosition);
}
}
}
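// Clears the offline logs of the given caches on a background thread and notifies the handler when done.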
private static void clearOfflineLogs(final Handler handler, final List<Geocache> selectedCaches) {
Schedulers.io().createWorker().schedule(new Action0() {
@Override
public void call() {
DataStore.clearLogsOffline(selectedCaches);
handler.sendEmptyMessage(DownloadProgress.MSG_DONE);
}
});
}
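// Footer click listener which loads the next page of search results.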
private class MoreCachesListener implements View.OnClickListener {
@Override
public void onClick(final View arg0) {
showProgress(true);
showFooterLoadingCaches();
getSupportLoaderManager().restartLoader(CacheListLoaderType.NEXT_PAGE.getLoaderId(), null, CacheListActivity.this);
}
}
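// Makes the list view visible and hides the initial loading indicator, if it is still shown.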
private void hideLoading() {
final ListView list = getListView();
if (list.getVisibility() == View.GONE) {
list.setVisibility(View.VISIBLE);
final View loading = findViewById(R.id.loading);
loading.setVisibility(View.GONE);
}
}
@NonNull
private Action1<Integer> getListSwitchingRunnable() {
return new Action1<Integer>() {
@Override
public void call(final Integer selectedListId) {
switchListById(selectedListId);
}
};
}
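// Switch to another stored or pseudo list and restart the corresponding loader.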
private void switchListById(final int id) {
switchListById(id, AfterLoadAction.NO_ACTION);
}
private void switchListById(final int id, @NonNull final AfterLoadAction action) {
if (id < 0) {
return;
}
final Bundle extras = new Bundle();
extras.putSerializable(BUNDLE_ACTION_KEY, action);
if (id == PseudoList.HISTORY_LIST.id) {
type = CacheListType.HISTORY;
getSupportLoaderManager().destroyLoader(CacheListType.OFFLINE.getLoaderId());
currentLoader = (AbstractSearchLoader) getSupportLoaderManager().restartLoader(CacheListType.HISTORY.getLoaderId(), extras, this);
} else {
if (id == PseudoList.ALL_LIST.id) {
listId = id;
title = res.getString(R.string.list_all_lists);
} else {
final StoredList list = DataStore.getList(id);
listId = list.id;
title = list.title;
}
type = CacheListType.OFFLINE;
getSupportLoaderManager().destroyLoader(CacheListType.HISTORY.getLoaderId());
extras.putAll(OfflineGeocacheListLoader.getBundleForList(listId));
currentLoader = (OfflineGeocacheListLoader) getSupportLoaderManager().restartLoader(CacheListType.OFFLINE.getLoaderId(), extras, this);
Settings.saveLastList(listId);
}
initAdapter();
showProgress(true);
showFooterLoadingCaches();
adapter.setSelectMode(false);
invalidateOptionsMenuCompatible();
}
private void renameList() {
(new RenameListCommand(this, listId) {
@Override
protected void onFinished() {
refreshCurrentList();
}
}).execute();
}
private void removeListInternal() {
new DeleteListCommand(this, listId) {
private String oldListName;
@Override
protected boolean canExecute() {
oldListName = DataStore.getList(listId).getTitle();
return super.canExecute();
}
@Override
protected void onFinished() {
refreshSpinnerAdapter();
switchListById(StoredList.STANDARD_LIST_ID);
}
@Override
protected void onFinishedUndo() {
refreshSpinnerAdapter();
for (final StoredList list : DataStore.getLists()) {
if (oldListName.equals(list.getTitle())) {
switchListById(list.id);
}
}
}
}.execute();
}
private void removeList() {
// if there are no caches on this list, don't bother the user with questions:
// there is no harm in deleting the list, as it can easily be recreated
if (CollectionUtils.isEmpty(cacheList)) {
removeListInternal();
return;
}
// otherwise ask for confirmation, since the list still contains caches
Dialogs.confirm(this, R.string.list_dialog_remove_title, R.string.list_dialog_remove_description, R.string.list_dialog_remove, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int whichButton) {
removeListInternal();
}
});
}
public void goMap() {
if (!cacheToShow()) {
return;
}
// apply filter settings (if there's a filter)
final SearchResult searchToUse = getFilteredSearch();
CGeoMap.startActivitySearch(this, searchToUse, title);
}
private void refreshCurrentList() {
refreshCurrentList(AfterLoadAction.NO_ACTION);
}
private void refreshCurrentList(@NonNull final AfterLoadAction action) {
refreshSpinnerAdapter();
switchListById(listId, action);
}
public static void startActivityOffline(final Context context) {
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.OFFLINE);
context.startActivity(cachesIntent);
}
public static void startActivityOwner(final Activity context, final String userName) {
if (!isValidUsername(context, userName)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.OWNER);
cachesIntent.putExtra(Intents.EXTRA_USERNAME, userName);
context.startActivity(cachesIntent);
}
private static boolean isValidUsername(final Activity context, final String username) {
if (StringUtils.isBlank(username)) {
ActivityMixin.showToast(context, R.string.warn_no_username);
return false;
}
return true;
}
public static void startActivityFinder(final Activity context, final String userName) {
if (!isValidUsername(context, userName)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.FINDER);
cachesIntent.putExtra(Intents.EXTRA_USERNAME, userName);
context.startActivity(cachesIntent);
}
private void prepareFilterBar() {
if (Settings.getCacheType() != CacheType.ALL || adapter.isFiltered()) {
final StringBuilder output = new StringBuilder(Settings.getCacheType().getL10n());
if (adapter.isFiltered()) {
output.append(", ").append(adapter.getFilterName());
}
final TextView filterTextView = ButterKnife.findById(this, R.id.filter_text);
filterTextView.setText(output.toString());
findViewById(R.id.filter_bar).setVisibility(View.VISIBLE);
} else {
findViewById(R.id.filter_bar).setVisibility(View.GONE);
}
}
public static Intent getNearestIntent(final Activity context) {
return Intents.putListType(new Intent(context, CacheListActivity.class), CacheListType.NEAREST);
}
public static Intent getHistoryIntent(final Context context) {
return Intents.putListType(new Intent(context, CacheListActivity.class), CacheListType.HISTORY);
}
public static void startActivityAddress(final Context context, final Geopoint coords, final String address) {
final Intent addressIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(addressIntent, CacheListType.ADDRESS);
addressIntent.putExtra(Intents.EXTRA_COORDS, coords);
addressIntent.putExtra(Intents.EXTRA_ADDRESS, address);
context.startActivity(addressIntent);
}
/**
* Start the list activity, searching around the given point.
*
* @param name
* name of the coordinates; leads to a title like "Around ..." instead of directly showing the
* coordinates as the title
*/
public static void startActivityCoordinates(final AbstractActivity context, final Geopoint coords, @Nullable final String name) {
if (!isValidCoords(context, coords)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.COORDINATE);
cachesIntent.putExtra(Intents.EXTRA_COORDS, coords);
if (StringUtils.isNotEmpty(name)) {
cachesIntent.putExtra(Intents.EXTRA_TITLE, context.getString(R.string.around, name));
}
context.startActivity(cachesIntent);
}
private static boolean isValidCoords(final AbstractActivity context, final Geopoint coords) {
if (coords == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_no_coordinates));
return false;
}
return true;
}
public static void startActivityKeyword(final AbstractActivity context, final String keyword) {
if (keyword == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_no_keyword));
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.KEYWORD);
cachesIntent.putExtra(Intents.EXTRA_KEYWORD, keyword);
context.startActivity(cachesIntent);
}
public static void startActivityMap(final Context context, final SearchResult search) {
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
cachesIntent.putExtra(Intents.EXTRA_SEARCH, search);
Intents.putListType(cachesIntent, CacheListType.MAP);
context.startActivity(cachesIntent);
}
public static void startActivityPocket(final AbstractActivity context, final @NonNull PocketQueryList pq) {
final String guid = pq.getGuid();
if (guid == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_pocket_query_select));
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.POCKET);
cachesIntent.putExtra(Intents.EXTRA_NAME, pq.getName());
cachesIntent.putExtra(Intents.EXTRA_POCKET_GUID, guid);
context.startActivity(cachesIntent);
}
// Loaders
@Override
public Loader<SearchResult> onCreateLoader(final int type, final Bundle extras) {
if (type >= CacheListLoaderType.values().length) {
throw new IllegalArgumentException("invalid loader type " + type);
}
final CacheListLoaderType enumType = CacheListLoaderType.values()[type];
AbstractSearchLoader loader = null;
switch (enumType) {
case OFFLINE:
// open either the requested or the last list
if (extras.containsKey(Intents.EXTRA_LIST_ID)) {
listId = extras.getInt(Intents.EXTRA_LIST_ID);
} else {
listId = Settings.getLastList();
}
if (listId == PseudoList.ALL_LIST.id) {
title = res.getString(R.string.list_all_lists);
} else if (listId <= StoredList.TEMPORARY_LIST.id) {
listId = StoredList.STANDARD_LIST_ID;
title = res.getString(R.string.stored_caches_button);
} else {
final StoredList list = DataStore.getList(listId);
// list.id may be different if listId was not valid
if (list.id != listId) {
showToast(getString(R.string.list_not_available));
}
listId = list.id;
title = list.title;
}
loader = new OfflineGeocacheListLoader(getBaseContext(), coords, listId);
break;
case HISTORY:
title = res.getString(R.string.caches_history);
listId = PseudoList.HISTORY_LIST.id;
loader = new HistoryGeocacheListLoader(app, coords);
break;
case NEAREST:
title = res.getString(R.string.caches_nearby);
loader = new CoordsGeocacheListLoader(app, coords);
break;
case COORDINATE:
title = coords.toString();
loader = new CoordsGeocacheListLoader(app, coords);
break;
case KEYWORD:
final String keyword = extras.getString(Intents.EXTRA_KEYWORD);
title = listNameMemento.rememberTerm(keyword);
loader = new KeywordGeocacheListLoader(app, keyword);
break;
case ADDRESS:
final String address = extras.getString(Intents.EXTRA_ADDRESS);
if (StringUtils.isNotBlank(address)) {
title = listNameMemento.rememberTerm(address);
} else {
title = coords.toString();
}
loader = new CoordsGeocacheListLoader(app, coords);
break;
case FINDER:
final String username = extras.getString(Intents.EXTRA_USERNAME);
title = listNameMemento.rememberTerm(username);
loader = new FinderGeocacheListLoader(app, username);
break;
case OWNER:
final String ownerName = extras.getString(Intents.EXTRA_USERNAME);
title = listNameMemento.rememberTerm(ownerName);
loader = new OwnerGeocacheListLoader(app, ownerName);
break;
case MAP:
//TODO Build Null loader
title = res.getString(R.string.map_map);
search = (SearchResult) extras.get(Intents.EXTRA_SEARCH);
replaceCacheListFromSearch();
loadCachesHandler.sendMessage(Message.obtain());
break;
case NEXT_PAGE:
loader = new NextPageGeocacheListLoader(app, search);
break;
case POCKET:
final String guid = extras.getString(Intents.EXTRA_POCKET_GUID);
title = extras.getString(Intents.EXTRA_NAME);
loader = new PocketGeocacheListLoader(app, guid);
break;
}
// if there is a title given in the activity start request, use this one instead of the default
if (extras != null && StringUtils.isNotBlank(extras.getString(Intents.EXTRA_TITLE))) {
title = extras.getString(Intents.EXTRA_TITLE);
}
if (loader != null && extras != null && extras.getSerializable(BUNDLE_ACTION_KEY) != null) {
final AfterLoadAction action = (AfterLoadAction) extras.getSerializable(BUNDLE_ACTION_KEY);
loader.setAfterLoadAction(action);
}
updateTitle();
showProgress(true);
showFooterLoadingCaches();
if (loader != null) {
loader.setRecaptchaHandler(new RecaptchaHandler(this, loader));
}
return loader;
}
@Override
public void onLoadFinished(final Loader<SearchResult> arg0, final SearchResult searchIn) {
// The database search was moved into the UI call intentionally. If this is done before the runOnUIThread,
// then we have 2 sets of caches in memory. This can lead to OOM for huge cache lists.
if (searchIn != null) {
cacheList.clear();
final Set<Geocache> cachesFromSearchResult = searchIn.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB);
cacheList.addAll(cachesFromSearchResult);
search = searchIn;
updateAdapter();
updateTitle();
showFooterMoreCaches();
}
showProgress(false);
hideLoading();
invalidateOptionsMenuCompatible();
if (arg0 instanceof AbstractSearchLoader) {
switch (((AbstractSearchLoader) arg0).getAfterLoadAction()) {
case CHECK_IF_EMPTY:
checkIfEmptyAndRemoveAfterConfirm();
break;
case NO_ACTION:
break;
}
}
}
@Override
public void onLoaderReset(final Loader<SearchResult> arg0) {
//Not interesting
}
/**
* Allow the title bar spinner to show the same subtitle as the activity itself would show.
*
*/
public CharSequence getCacheListSubtitle(@NonNull final AbstractList list) {
// if this is the current list, be aware of filtering
if (list.id == listId) {
return getCurrentSubtitle();
}
// otherwise return the overall number
final int numberOfCaches = list.getNumberOfCaches();
if (numberOfCaches < 0) {
return StringUtils.EMPTY;
}
return getCacheNumberString(getResources(), numberOfCaches);
}
/**
* Calculate the subtitle of the current list depending on (optional) filters.
*
*/
private CharSequence getCurrentSubtitle() {
if (search == null) {
return getCacheNumberString(getResources(), 0);
}
final StringBuilder result = new StringBuilder();
if (adapter.isFiltered()) {
result.append(adapter.getCount()).append('/');
}
result.append(getCacheNumberString(getResources(), search.getCount()));
return result.toString();
}
@Override
public ShowcaseViewBuilder getShowcase() {
if (mCacheListSpinnerAdapter != null) {
return new ShowcaseViewBuilder(this)
.setTarget(new ActionViewTarget(this, Type.SPINNER))
.setContent(R.string.showcase_cachelist_title, R.string.showcase_cachelist_text);
}
return null;
}
/**
* Used to indicate if an action should be taken after the AbstractSearchLoader has finished
*/
public enum AfterLoadAction {
/** Take no action */
NO_ACTION,
/** Check if the list is empty and prompt for deletion */
CHECK_IF_EMPTY
}
}
| main/src/cgeo/geocaching/CacheListActivity.java | package cgeo.geocaching;
import cgeo.geocaching.activity.AbstractActivity;
import cgeo.geocaching.activity.AbstractListActivity;
import cgeo.geocaching.activity.ActivityMixin;
import cgeo.geocaching.activity.FilteredActivity;
import cgeo.geocaching.activity.Progress;
import cgeo.geocaching.activity.ShowcaseViewBuilder;
import cgeo.geocaching.apps.cachelist.CacheListApp;
import cgeo.geocaching.apps.cachelist.CacheListAppUtils;
import cgeo.geocaching.apps.cachelist.CacheListApps;
import cgeo.geocaching.apps.cachelist.ListNavigationSelectionActionProvider;
import cgeo.geocaching.apps.navi.NavigationAppFactory;
import cgeo.geocaching.command.DeleteCachesCommand;
import cgeo.geocaching.command.DeleteListCommand;
import cgeo.geocaching.command.MoveToListCommand;
import cgeo.geocaching.command.RenameListCommand;
import cgeo.geocaching.compatibility.Compatibility;
import cgeo.geocaching.connector.gc.RecaptchaHandler;
import cgeo.geocaching.enumerations.CacheListType;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.enumerations.StatusCode;
import cgeo.geocaching.export.FieldnoteExport;
import cgeo.geocaching.export.GpxExport;
import cgeo.geocaching.files.GPXImporter;
import cgeo.geocaching.filter.FilterActivity;
import cgeo.geocaching.filter.IFilter;
import cgeo.geocaching.list.AbstractList;
import cgeo.geocaching.list.ListNameMemento;
import cgeo.geocaching.list.PseudoList;
import cgeo.geocaching.list.StoredList;
import cgeo.geocaching.loaders.AbstractSearchLoader;
import cgeo.geocaching.loaders.AbstractSearchLoader.CacheListLoaderType;
import cgeo.geocaching.loaders.CoordsGeocacheListLoader;
import cgeo.geocaching.loaders.FinderGeocacheListLoader;
import cgeo.geocaching.loaders.HistoryGeocacheListLoader;
import cgeo.geocaching.loaders.KeywordGeocacheListLoader;
import cgeo.geocaching.loaders.NextPageGeocacheListLoader;
import cgeo.geocaching.loaders.OfflineGeocacheListLoader;
import cgeo.geocaching.loaders.OwnerGeocacheListLoader;
import cgeo.geocaching.loaders.PocketGeocacheListLoader;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.maps.CGeoMap;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.PocketQueryList;
import cgeo.geocaching.network.Cookies;
import cgeo.geocaching.network.DownloadProgress;
import cgeo.geocaching.network.Network;
import cgeo.geocaching.network.Send2CgeoDownloader;
import cgeo.geocaching.sensors.GeoData;
import cgeo.geocaching.sensors.GeoDirHandler;
import cgeo.geocaching.sensors.Sensors;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.settings.SettingsActivity;
import cgeo.geocaching.sorting.CacheComparator;
import cgeo.geocaching.sorting.SortActionProvider;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.ui.CacheListAdapter;
import cgeo.geocaching.ui.LoggingUI;
import cgeo.geocaching.ui.WeakReferenceHandler;
import cgeo.geocaching.ui.dialog.Dialogs;
import cgeo.geocaching.utils.AngleUtils;
import cgeo.geocaching.utils.CalendarUtils;
import cgeo.geocaching.utils.CancellableHandler;
import cgeo.geocaching.utils.Log;
import cgeo.geocaching.utils.AndroidRxUtils;
import com.github.amlcurran.showcaseview.targets.ActionViewTarget;
import com.github.amlcurran.showcaseview.targets.ActionViewTarget.Type;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.provider.OpenableColumns;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBar;
import android.view.ContextMenu;
import android.view.ContextMenu.ContextMenuInfo;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView.AdapterContextMenuInfo;
import android.widget.ListView;
import android.widget.TextView;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import butterknife.ButterKnife;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import rx.Subscription;
import rx.functions.Action0;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
public class CacheListActivity extends AbstractListActivity implements FilteredActivity, LoaderManager.LoaderCallbacks<SearchResult> {
private static final int MAX_LIST_ITEMS = 1000;
private static final int REQUEST_CODE_IMPORT_GPX = 1;
private static final String STATE_FILTER = "currentFilter";
private static final String STATE_INVERSE_SORT = "currentInverseSort";
private static final String STATE_LIST_TYPE = "currentListType";
private static final String STATE_LIST_ID = "currentListId";
private static final String BUNDLE_ACTION_KEY = "afterLoadAction";
private CacheListType type = null;
private Geopoint coords = null;
private SearchResult search = null;
/** The list of shown caches, shared with the adapter. Manipulate only on the main thread or via a handler. */
private final List<Geocache> cacheList = new ArrayList<>();
private CacheListAdapter adapter = null;
private View listFooter = null;
private TextView listFooterText = null;
private final Progress progress = new Progress();
private String title = "";
private int detailTotal = 0;
private final AtomicInteger detailProgress = new AtomicInteger(0);
private long detailProgressTime = 0L;
private int listId = StoredList.TEMPORARY_LIST.id; // Only meaningful for the OFFLINE type
private final GeoDirHandler geoDirHandler = new GeoDirHandler() {
@Override
public void updateDirection(final float direction) {
if (Settings.isLiveList()) {
adapter.setActualHeading(AngleUtils.getDirectionNow(direction));
}
}
@Override
public void updateGeoData(final GeoData geoData) {
adapter.setActualCoordinates(geoData.getCoords());
}
};
private ContextMenuInfo lastMenuInfo;
private String contextMenuGeocode = "";
private Subscription resumeSubscription;
private final ListNameMemento listNameMemento = new ListNameMemento();
// FIXME: This method has mostly been replaced by the loaders. But it still contains a license agreement check.
public void handleCachesLoaded() {
try {
updateAdapter();
updateTitle();
showFooterMoreCaches();
if (search != null && search.getError() == StatusCode.UNAPPROVED_LICENSE) {
showLicenseConfirmationDialog();
} else if (search != null && search.getError() != null) {
showToast(res.getString(R.string.err_download_fail) + ' ' + search.getError().getErrorString(res) + '.');
hideLoading();
showProgress(false);
finish();
return;
}
setAdapterCurrentCoordinates(false);
} catch (final Exception e) {
showToast(res.getString(R.string.err_detail_cache_find_any));
Log.e("CacheListActivity.loadCachesHandler", e);
hideLoading();
showProgress(false);
finish();
return;
}
try {
hideLoading();
showProgress(false);
} catch (final Exception e2) {
Log.e("CacheListActivity.loadCachesHandler.2", e2);
}
adapter.setSelectMode(false);
}
private void showLicenseConfirmationDialog() {
final AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setTitle(res.getString(R.string.license));
dialog.setMessage(res.getString(R.string.err_license));
dialog.setCancelable(true);
dialog.setNegativeButton(res.getString(R.string.license_dismiss), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
Cookies.clearCookies();
dialog.cancel();
}
});
dialog.setPositiveButton(res.getString(R.string.license_show), new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
Cookies.clearCookies();
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.geocaching.com/software/agreement.aspx?ID=0")));
}
});
final AlertDialog alert = dialog.create();
alert.show();
}
private final Handler loadCachesHandler = new LoadCachesHandler(this);
private static class LoadCachesHandler extends WeakReferenceHandler<CacheListActivity> {
protected LoadCachesHandler(final CacheListActivity activity) {
super(activity);
}
@Override
public void handleMessage(final Message msg) {
final CacheListActivity activity = getActivity();
if (activity == null) {
return;
}
activity.handleCachesLoaded();
}
}
/**
* Loads the caches and fills the {@link #cacheList} according to {@link #search} content.
*
* If {@link #search} is <code>null</code>, this does nothing.
*/
private void replaceCacheListFromSearch() {
if (search != null) {
runOnUiThread(new Runnable() {
@Override
public void run() {
cacheList.clear();
// The database search was moved into the UI call intentionally. If this is done before the runOnUIThread,
// then we have 2 sets of caches in memory. This can lead to OOM for huge cache lists.
final Set<Geocache> cachesFromSearchResult = search.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB);
cacheList.addAll(cachesFromSearchResult);
adapter.reFilter();
updateTitle();
showFooterMoreCaches();
}
});
}
}
private static String getCacheNumberString(final Resources res, final int count) {
return res.getQuantityString(R.plurals.cache_counts, count, count);
}
protected void updateTitle() {
setTitle(title);
getSupportActionBar().setSubtitle(getCurrentSubtitle());
refreshSpinnerAdapter();
}
private class LoadDetailsHandler extends CancellableHandler {
@Override
public void handleRegularMessage(final Message msg) {
updateAdapter();
if (msg.what == DownloadProgress.MSG_LOADED) {
((Geocache) msg.obj).setStatusChecked(false);
adapter.notifyDataSetChanged();
final int dp = detailProgress.get();
final int secondsElapsed = (int) ((System.currentTimeMillis() - detailProgressTime) / 1000);
final int minutesRemaining = ((detailTotal - dp) * secondsElapsed / ((dp > 0) ? dp : 1) / 60);
progress.setProgress(dp);
if (minutesRemaining < 1) {
progress.setMessage(res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm));
} else {
progress.setMessage(res.getString(R.string.caches_downloading) + " " + res.getQuantityString(R.plurals.caches_eta_mins, minutesRemaining, minutesRemaining));
}
} else {
new AsyncTask<Void, Void, Set<Geocache>>() {
@Override
protected Set<Geocache> doInBackground(final Void... params) {
return search != null ? search.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB) : null;
}
@Override
protected void onPostExecute(final Set<Geocache> result) {
if (CollectionUtils.isNotEmpty(result)) {
cacheList.clear();
cacheList.addAll(result);
adapter.reFilter();
}
setAdapterCurrentCoordinates(false);
showProgress(false);
progress.dismiss();
}
}.execute();
}
}
}
/**
* TODO Possibly parts should be a Thread not a Handler
*/
private class DownloadFromWebHandler extends CancellableHandler {
@Override
public void handleRegularMessage(final Message msg) {
updateAdapter();
adapter.notifyDataSetChanged();
switch (msg.what) {
case DownloadProgress.MSG_WAITING: //no caches
progress.setMessage(res.getString(R.string.web_import_waiting));
break;
case DownloadProgress.MSG_LOADING: //cache downloading
progress.setMessage(res.getString(R.string.web_downloading) + " " + msg.obj + '…');
break;
case DownloadProgress.MSG_LOADED: //Cache downloaded
progress.setMessage(res.getString(R.string.web_downloaded) + " " + msg.obj + '…');
refreshCurrentList();
break;
case DownloadProgress.MSG_SERVER_FAIL:
progress.dismiss();
showToast(res.getString(R.string.sendToCgeo_download_fail));
finish();
break;
case DownloadProgress.MSG_NO_REGISTRATION:
progress.dismiss();
showToast(res.getString(R.string.sendToCgeo_no_registration));
finish();
break;
default: // MSG_DONE
adapter.setSelectMode(false);
replaceCacheListFromSearch();
progress.dismiss();
break;
}
}
}
private final CancellableHandler clearOfflineLogsHandler = new CancellableHandler() {
@Override
public void handleRegularMessage(final Message msg) {
adapter.setSelectMode(false);
refreshCurrentList();
replaceCacheListFromSearch();
progress.dismiss();
}
};
private final Handler importGpxAttachementFinishedHandler = new Handler() {
@Override
public void handleMessage(final Message msg) {
refreshCurrentList();
}
};
private AbstractSearchLoader currentLoader;
public CacheListActivity() {
super(true);
}
@Override
public void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setTheme();
setContentView(R.layout.cacheslist_activity);
// get parameters
Bundle extras = getIntent().getExtras();
if (extras != null) {
type = Intents.getListType(getIntent());
coords = extras.getParcelable(Intents.EXTRA_COORDS);
}
else {
extras = new Bundle();
}
if (isInvokedFromAttachment()) {
type = CacheListType.OFFLINE;
if (coords == null) {
coords = Geopoint.ZERO;
}
}
if (type == CacheListType.NEAREST) {
coords = Sensors.getInstance().currentGeo().getCoords();
}
setTitle(title);
// Check whether we're recreating a previously destroyed instance
if (savedInstanceState != null) {
// Restore value of members from saved state
currentFilter = savedInstanceState.getParcelable(STATE_FILTER);
currentInverseSort = savedInstanceState.getBoolean(STATE_INVERSE_SORT);
type = CacheListType.values()[savedInstanceState.getInt(STATE_LIST_TYPE, type.ordinal())];
listId = savedInstanceState.getInt(STATE_LIST_ID);
}
initAdapter();
prepareFilterBar();
if (type.canSwitch) {
initActionBarSpinner();
}
currentLoader = (AbstractSearchLoader) getSupportLoaderManager().initLoader(type.getLoaderId(), extras, this);
// init
if (CollectionUtils.isNotEmpty(cacheList)) {
// currentLoader can be null if this activity is created from a map, as onCreateLoader() will return null.
if (currentLoader != null && currentLoader.isStarted()) {
showFooterLoadingCaches();
} else {
showFooterMoreCaches();
}
}
if (isInvokedFromAttachment()) {
importGpxAttachement();
}
else {
presentShowcase();
}
}
@Override
public void onSaveInstanceState(final Bundle savedInstanceState) {
// Always call the superclass so it can save the view hierarchy state
super.onSaveInstanceState(savedInstanceState);
// Save the current Filter
savedInstanceState.putParcelable(STATE_FILTER, currentFilter);
savedInstanceState.putBoolean(STATE_INVERSE_SORT, adapter.getInverseSort());
savedInstanceState.putInt(STATE_LIST_TYPE, type.ordinal());
savedInstanceState.putInt(STATE_LIST_ID, listId);
}
/**
* Action bar spinner adapter. {@code null} for list types that don't allow switching (search results, ...).
*/
CacheListSpinnerAdapter mCacheListSpinnerAdapter;
/**
* remember current filter when switching between lists, so it can be re-applied afterwards
*/
private IFilter currentFilter = null;
private boolean currentInverseSort = false;
private SortActionProvider sortProvider;
private void initActionBarSpinner() {
mCacheListSpinnerAdapter = new CacheListSpinnerAdapter(this, R.layout.support_simple_spinner_dropdown_item);
getSupportActionBar().setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
getSupportActionBar().setDisplayShowTitleEnabled(false);
getSupportActionBar().setListNavigationCallbacks(mCacheListSpinnerAdapter, new ActionBar.OnNavigationListener() {
@Override
public boolean onNavigationItemSelected(final int i, final long l) {
final int newListId = mCacheListSpinnerAdapter.getItem(i).id;
if (newListId != listId) {
switchListById(newListId);
}
return true;
}
});
}
private void refreshSpinnerAdapter() {
/* If the activity does not use the Spinner this will be null */
if (mCacheListSpinnerAdapter==null) {
return;
}
mCacheListSpinnerAdapter.clear();
final AbstractList list = AbstractList.getListById(listId);
for (final AbstractList l: StoredList.UserInterface.getMenuLists(false, PseudoList.NEW_LIST.id)) {
mCacheListSpinnerAdapter.add(l);
}
getSupportActionBar().setSelectedNavigationItem(mCacheListSpinnerAdapter.getPosition(list));
}
@Override
public void onConfigurationChanged(final Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (currentLoader != null && currentLoader.isLoading()) {
showFooterLoadingCaches();
}
}
private boolean isConcreteList() {
return type == CacheListType.OFFLINE &&
(listId == StoredList.STANDARD_LIST_ID || listId >= DataStore.customListIdOffset);
}
private boolean isInvokedFromAttachment() {
final Intent intent = getIntent();
return Intent.ACTION_VIEW.equals(intent.getAction()) && intent.getData() != null;
}
private void importGpxAttachement() {
new StoredList.UserInterface(this).promptForListSelection(R.string.gpx_import_select_list_title, new Action1<Integer>() {
@Override
public void call(final Integer listId) {
new GPXImporter(CacheListActivity.this, listId, importGpxAttachementFinishedHandler).importGPX();
switchListById(listId);
}
}, true, 0);
}
@Override
public void onResume() {
super.onResume();
resumeSubscription = geoDirHandler.start(GeoDirHandler.UPDATE_GEODATA | GeoDirHandler.UPDATE_DIRECTION | GeoDirHandler.LOW_POWER, 250, TimeUnit.MILLISECONDS);
adapter.setSelectMode(false);
setAdapterCurrentCoordinates(true);
if (search != null) {
replaceCacheListFromSearch();
loadCachesHandler.sendEmptyMessage(0);
}
// refresh standard list if it has changed (new caches downloaded)
if (type == CacheListType.OFFLINE && (listId >= StoredList.STANDARD_LIST_ID || listId == PseudoList.ALL_LIST.id) && search != null) {
final SearchResult newSearch = DataStore.getBatchOfStoredCaches(coords, Settings.getCacheType(), listId);
if (newSearch.getTotalCountGC() != search.getTotalCountGC()) {
refreshCurrentList();
}
}
}
private void setAdapterCurrentCoordinates(final boolean forceSort) {
adapter.setActualCoordinates(Sensors.getInstance().currentGeo().getCoords());
if (forceSort) {
adapter.forceSort();
}
}
@Override
public void onPause() {
resumeSubscription.unsubscribe();
super.onPause();
}
@Override
public boolean onCreateOptionsMenu(final Menu menu) {
getMenuInflater().inflate(R.menu.cache_list_options, menu);
sortProvider = (SortActionProvider) MenuItemCompat.getActionProvider(menu.findItem(R.id.menu_sort));
assert sortProvider != null; // We set it in the XML file
sortProvider.setSelection(adapter.getCacheComparator());
sortProvider.setIsEventsOnly(adapter.isEventsOnly());
sortProvider.setClickListener(new Action1<CacheComparator>() {
@Override
public void call(final CacheComparator selectedComparator) {
final CacheComparator oldComparator = adapter.getCacheComparator();
// selecting the same sorting twice will toggle the order
if (selectedComparator != null && oldComparator != null && selectedComparator.getClass().equals(oldComparator.getClass())) {
adapter.toggleInverseSort();
} else {
// always reset the inversion for a new sorting criteria
adapter.resetInverseSort();
}
setComparator(selectedComparator);
sortProvider.setSelection(selectedComparator);
}
});
ListNavigationSelectionActionProvider.initialize(menu.findItem(R.id.menu_cache_list_app_provider), new ListNavigationSelectionActionProvider.Callback() {
@Override
public void onListNavigationSelected(final CacheListApp app) {
app.invoke(CacheListAppUtils.filterCoords(cacheList), CacheListActivity.this, getFilteredSearch());
}
});
return true;
}
private static void setVisible(final Menu menu, final int itemId, final boolean visible) {
menu.findItem(itemId).setVisible(visible);
}
@Override
public boolean onPrepareOptionsMenu(final Menu menu) {
super.onPrepareOptionsMenu(menu);
final boolean isHistory = type == CacheListType.HISTORY;
final boolean isOffline = type == CacheListType.OFFLINE;
final boolean isEmpty = cacheList.isEmpty();
final boolean isConcrete = isConcreteList();
try {
if (adapter.isSelectMode()) {
menu.findItem(R.id.menu_switch_select_mode).setTitle(res.getString(R.string.caches_select_mode_exit))
.setIcon(R.drawable.ic_menu_clear_playlist);
} else {
menu.findItem(R.id.menu_switch_select_mode).setTitle(res.getString(R.string.caches_select_mode))
.setIcon(R.drawable.ic_menu_agenda);
}
menu.findItem(R.id.menu_invert_selection).setVisible(adapter.isSelectMode());
setVisible(menu, R.id.menu_show_on_map, !isEmpty);
setVisible(menu, R.id.menu_filter, search != null && search.getCount() > 0);
setVisible(menu, R.id.menu_switch_select_mode, !isEmpty);
setVisible(menu, R.id.menu_create_list, isOffline);
setVisible(menu, R.id.menu_sort, !isEmpty && !isHistory);
setVisible(menu, R.id.menu_refresh_stored, !isEmpty && (isConcrete || type != CacheListType.OFFLINE));
setVisible(menu, R.id.menu_drop_caches, !isEmpty && isOffline);
setVisible(menu, R.id.menu_delete_events, isConcrete && !isEmpty && containsPastEvents());
setVisible(menu, R.id.menu_move_to_list, isOffline && !isEmpty);
setVisible(menu, R.id.menu_remove_from_history, !isEmpty && isHistory);
setVisible(menu, R.id.menu_clear_offline_logs, !isEmpty && (isHistory || isOffline) && containsOfflineLogs());
setVisible(menu, R.id.menu_import, isOffline);
setVisible(menu, R.id.menu_import_web, isOffline);
setVisible(menu, R.id.menu_import_gpx, isOffline);
setVisible(menu, R.id.menu_export, !isEmpty);
if (!isOffline && !isHistory) {
menu.findItem(R.id.menu_refresh_stored).setTitle(R.string.caches_store_offline);
}
final boolean isNonDefaultList = isConcrete && listId != StoredList.STANDARD_LIST_ID;
if (isOffline || type == CacheListType.HISTORY) { // only offline list
setMenuItemLabel(menu, R.id.menu_drop_caches, R.string.caches_remove_selected, R.string.caches_remove_all);
setMenuItemLabel(menu, R.id.menu_refresh_stored, R.string.caches_refresh_selected, R.string.caches_refresh_all);
setMenuItemLabel(menu, R.id.menu_move_to_list, R.string.caches_move_selected, R.string.caches_move_all);
} else { // search and global list (all other than offline and history)
setMenuItemLabel(menu, R.id.menu_refresh_stored, R.string.caches_store_selected, R.string.caches_store_offline);
}
menu.findItem(R.id.menu_drop_list).setVisible(isNonDefaultList);
menu.findItem(R.id.menu_rename_list).setVisible(isNonDefaultList);
setMenuItemLabel(menu, R.id.menu_remove_from_history, R.string.cache_remove_from_history, R.string.cache_clear_history);
menu.findItem(R.id.menu_import_android).setVisible(Compatibility.isStorageAccessFrameworkAvailable() && isOffline);
final List<CacheListApp> listNavigationApps = CacheListApps.getActiveApps();
menu.findItem(R.id.menu_cache_list_app_provider).setVisible(!isEmpty && listNavigationApps.size() > 1);
menu.findItem(R.id.menu_cache_list_app).setVisible(!isEmpty && listNavigationApps.size() == 1);
} catch (final RuntimeException e) {
Log.e("CacheListActivity.onPrepareOptionsMenu", e);
}
return true;
}
private boolean containsPastEvents() {
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (CalendarUtils.isPastEvent(cache)) {
return true;
}
}
return false;
}
private boolean containsOfflineLogs() {
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (cache.isLogOffline()) {
return true;
}
}
return false;
}
private void setMenuItemLabel(final Menu menu, final int menuId, final int resIdSelection, final int resId) {
final MenuItem menuItem = menu.findItem(menuId);
if (menuItem == null) {
return;
}
final boolean hasSelection = adapter != null && adapter.getCheckedCount() > 0;
if (hasSelection) {
menuItem.setTitle(res.getString(resIdSelection) + " (" + adapter.getCheckedCount() + ")");
} else {
menuItem.setTitle(res.getString(resId));
}
}
@Override
public boolean onOptionsItemSelected(final MenuItem item) {
switch (item.getItemId()) {
case R.id.menu_show_on_map:
goMap();
return true;
case R.id.menu_switch_select_mode:
adapter.switchSelectMode();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_refresh_stored:
refreshStored(adapter.getCheckedOrAllCaches());
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_drop_caches:
deleteCachesWithConfirmation();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_import_gpx:
importGpx();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_import_android:
importGpxFromAndroid();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_create_list:
new StoredList.UserInterface(this).promptForListCreation(getListSwitchingRunnable(), listNameMemento.getTerm());
refreshSpinnerAdapter();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_drop_list:
removeList();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_rename_list:
renameList();
return true;
case R.id.menu_invert_selection:
adapter.invertSelection();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_filter:
showFilterMenu(null);
return true;
case R.id.menu_import_web:
importWeb();
return true;
case R.id.menu_export_gpx:
new GpxExport().export(adapter.getCheckedOrAllCaches(), this);
return true;
case R.id.menu_export_fieldnotes:
new FieldnoteExport().export(adapter.getCheckedOrAllCaches(), this);
return true;
case R.id.menu_remove_from_history:
removeFromHistoryCheck();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_move_to_list:
moveCachesToOtherList(adapter.getCheckedOrAllCaches());
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_delete_events:
deletePastEvents();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_clear_offline_logs:
clearOfflineLogs();
invalidateOptionsMenuCompatible();
return true;
case R.id.menu_cache_list_app:
if (cacheToShow()) {
CacheListApps.getActiveApps().get(0).invoke(CacheListAppUtils.filterCoords(cacheList), this, getFilteredSearch());
}
return true;
}
return super.onOptionsItemSelected(item);
}
private void checkIfEmptyAndRemoveAfterConfirm() {
final boolean isNonDefaultList = isConcreteList() && listId != StoredList.STANDARD_LIST_ID;
if (isNonDefaultList && CollectionUtils.isEmpty(cacheList)) {
            // ask the user whether to delete the now-empty list
Dialogs.confirmYesNo(this, R.string.list_dialog_remove_title, R.string.list_dialog_remove_nowempty, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int whichButton) {
removeListInternal();
}
});
}
}
private boolean cacheToShow() {
if (search == null || CollectionUtils.isEmpty(cacheList)) {
showToast(res.getString(R.string.warn_no_cache_coord));
return false;
}
return true;
}
private SearchResult getFilteredSearch() {
return new SearchResult(Geocache.getGeocodes(adapter.getFilteredList()));
}
private void deletePastEvents() {
final List<Geocache> deletion = new ArrayList<>();
for (final Geocache cache : adapter.getCheckedOrAllCaches()) {
if (CalendarUtils.isPastEvent(cache)) {
deletion.add(cache);
}
}
deleteCachesInternal(deletion);
}
private void clearOfflineLogs() {
Dialogs.confirmYesNo(this, R.string.caches_clear_offlinelogs, R.string.caches_clear_offlinelogs_message, new OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
progress.show(CacheListActivity.this, null, res.getString(R.string.caches_clear_offlinelogs_progress), true, clearOfflineLogsHandler.cancelMessage());
clearOfflineLogs(clearOfflineLogsHandler, adapter.getCheckedOrAllCaches());
}
});
}
/**
* called from the filter bar view
*/
@Override
public void showFilterMenu(final View view) {
FilterActivity.selectFilter(this);
}
private void setComparator(final CacheComparator comparator) {
adapter.setComparator(comparator);
currentInverseSort = adapter.getInverseSort();
}
@Override
public void onCreateContextMenu(final ContextMenu menu, final View view, final ContextMenu.ContextMenuInfo info) {
super.onCreateContextMenu(menu, view, info);
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (final Exception e) {
Log.w("CacheListActivity.onCreateContextMenu", e);
}
if (adapterInfo == null || adapterInfo.position >= adapter.getCount()) {
return;
}
final Geocache cache = adapter.getItem(adapterInfo.position);
menu.setHeaderTitle(StringUtils.defaultIfBlank(cache.getName(), cache.getGeocode()));
contextMenuGeocode = cache.getGeocode();
getMenuInflater().inflate(R.menu.cache_list_context, menu);
menu.findItem(R.id.menu_default_navigation).setTitle(NavigationAppFactory.getDefaultNavigationApplication().getName());
final boolean hasCoords = cache.getCoords() != null;
menu.findItem(R.id.menu_default_navigation).setVisible(hasCoords);
menu.findItem(R.id.menu_navigate).setVisible(hasCoords);
menu.findItem(R.id.menu_cache_details).setVisible(hasCoords);
final boolean isOffline = cache.isOffline();
menu.findItem(R.id.menu_drop_cache).setVisible(isOffline);
menu.findItem(R.id.menu_move_to_list).setVisible(isOffline);
menu.findItem(R.id.menu_refresh).setVisible(isOffline);
menu.findItem(R.id.menu_store_cache).setVisible(!isOffline);
LoggingUI.onPrepareOptionsMenu(menu, cache, adapterInfo.targetView);
}
private void moveCachesToOtherList(final Collection<Geocache> caches) {
new MoveToListCommand(this, caches, listId) {
@Override
protected void onFinished() {
adapter.setSelectMode(false);
refreshCurrentList(AfterLoadAction.CHECK_IF_EMPTY);
}
}.execute();
}
@Override
public boolean onContextItemSelected(final MenuItem item) {
ContextMenu.ContextMenuInfo info = item.getMenuInfo();
// restore menu info for sub menu items, see
// https://code.google.com/p/android/issues/detail?id=7139
if (info == null) {
info = lastMenuInfo;
lastMenuInfo = null;
}
AdapterContextMenuInfo adapterInfo = null;
try {
adapterInfo = (AdapterContextMenuInfo) info;
} catch (final Exception e) {
Log.w("CacheListActivity.onContextItemSelected", e);
}
final Geocache cache = adapterInfo != null ? getCacheFromAdapter(adapterInfo) : null;
// just in case the list got resorted while we are executing this code
if (cache == null) {
return true;
}
switch (item.getItemId()) {
case R.id.menu_default_navigation:
NavigationAppFactory.startDefaultNavigationApplication(1, this, cache);
break;
case R.id.menu_navigate:
NavigationAppFactory.showNavigationMenu(this, cache, null, null);
break;
case R.id.menu_cache_details:
CacheDetailActivity.startActivity(this, cache.getGeocode(), cache.getName());
break;
case R.id.menu_drop_cache:
deleteCachesInternal(Collections.singletonList(cache));
break;
case R.id.menu_move_to_list:
moveCachesToOtherList(Collections.singletonList(cache));
break;
case R.id.menu_store_cache:
case R.id.menu_refresh:
refreshStored(Collections.singletonList(cache));
break;
default:
// we must remember the menu info for the sub menu, there is a bug
// in Android:
// https://code.google.com/p/android/issues/detail?id=7139
lastMenuInfo = info;
LoggingUI.onMenuItemSelected(item, this, cache);
}
return true;
}
/**
* Extract a cache from adapter data.
*
* @param adapterInfo
* an adapterInfo
* @return the pointed cache
*/
private Geocache getCacheFromAdapter(final AdapterContextMenuInfo adapterInfo) {
final Geocache cache = adapter.getItem(adapterInfo.position);
if (cache.getGeocode().equalsIgnoreCase(contextMenuGeocode)) {
return cache;
}
return adapter.findCacheByGeocode(contextMenuGeocode);
}
private boolean setFilter(final IFilter filter) {
currentFilter = filter;
adapter.setFilter(filter);
prepareFilterBar();
updateTitle();
invalidateOptionsMenuCompatible();
return true;
}
@Override
public boolean onKeyDown(final int keyCode, final KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (adapter.isSelectMode()) {
adapter.setSelectMode(false);
return true;
}
}
return super.onKeyDown(keyCode, event);
}
private void initAdapter() {
final ListView listView = getListView();
registerForContextMenu(listView);
adapter = new CacheListAdapter(this, cacheList, type);
adapter.setFilter(currentFilter);
if (listFooter == null) {
listFooter = getLayoutInflater().inflate(R.layout.cacheslist_footer, listView, false);
listFooter.setClickable(true);
listFooter.setOnClickListener(new MoreCachesListener());
listFooterText = ButterKnife.findById(listFooter, R.id.more_caches);
listView.addFooterView(listFooter);
}
setListAdapter(adapter);
adapter.setInverseSort(currentInverseSort);
adapter.forceSort();
}
private void updateAdapter() {
adapter.notifyDataSetChanged();
adapter.reFilter();
adapter.checkEvents();
adapter.forceSort();
}
private void showFooterLoadingCaches() {
// no footer for offline lists
if (listFooter == null) {
return;
}
listFooterText.setText(res.getString(R.string.caches_more_caches_loading));
listFooter.setClickable(false);
listFooter.setOnClickListener(null);
}
private void showFooterMoreCaches() {
// no footer in offline lists
if (listFooter == null) {
return;
}
boolean enableMore = type != CacheListType.OFFLINE && cacheList.size() < MAX_LIST_ITEMS;
if (enableMore && search != null) {
final int count = search.getTotalCountGC();
enableMore = count > 0 && cacheList.size() < count;
}
listFooter.setClickable(enableMore);
if (enableMore) {
listFooterText.setText(res.getString(R.string.caches_more_caches) + " (" + res.getString(R.string.caches_more_caches_currently) + ": " + cacheList.size() + ")");
listFooter.setOnClickListener(new MoreCachesListener());
} else if (type != CacheListType.OFFLINE) {
listFooterText.setText(res.getString(CollectionUtils.isEmpty(cacheList) ? R.string.caches_no_cache : R.string.caches_more_caches_no));
listFooter.setOnClickListener(null);
} else {
// hiding footer for offline list is not possible, it must be removed instead
// http://stackoverflow.com/questions/7576099/hiding-footer-in-listview
getListView().removeFooterView(listFooter);
}
}
private void importGpx() {
GpxFileListActivity.startSubActivity(this, listId);
}
private void importGpxFromAndroid() {
Compatibility.importGpxFromStorageAccessFramework(this, REQUEST_CODE_IMPORT_GPX);
}
@Override
protected void onActivityResult(final int requestCode, final int resultCode, final Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_CODE_IMPORT_GPX && resultCode == Activity.RESULT_OK) {
            // The document selected by the user won't be returned in the intent.
            // Instead, a URI to that document will be contained in the return intent
            // provided to this method as a parameter. Pull that URI using "data.getData()".
if (data != null) {
final Uri uri = data.getData();
new GPXImporter(this, listId, importGpxAttachementFinishedHandler).importGPX(uri, null, getDisplayName(uri));
}
}
else if (requestCode == FilterActivity.REQUEST_SELECT_FILTER && resultCode == Activity.RESULT_OK) {
final int[] filterIndex = data.getIntArrayExtra(FilterActivity.EXTRA_FILTER_RESULT);
setFilter(FilterActivity.getFilterFromPosition(filterIndex[0], filterIndex[1]));
}
if (type == CacheListType.OFFLINE) {
refreshCurrentList();
}
}
private String getDisplayName(final Uri uri) {
Cursor cursor = null;
try {
cursor = getContentResolver().query(uri, new String[] { OpenableColumns.DISPLAY_NAME }, null, null, null);
if (cursor != null && cursor.moveToFirst()) {
return cursor.getString(cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME));
}
} finally {
if (cursor != null) {
cursor.close();
}
}
return null;
}
public void refreshStored(final List<Geocache> caches) {
detailTotal = caches.size();
if (detailTotal == 0) {
return;
}
if (!Network.isNetworkConnected()) {
showToast(getString(R.string.err_server));
return;
}
if (Settings.getChooseList() && (type != CacheListType.OFFLINE && type != CacheListType.HISTORY)) {
// let user select list to store cache in
new StoredList.UserInterface(this).promptForListSelection(R.string.list_title,
new Action1<Integer>() {
@Override
public void call(final Integer selectedListId) {
// in case of online lists, set the list id to a concrete list now
for (final Geocache geocache : caches) {
geocache.setListId(selectedListId);
}
refreshStoredInternal(caches);
}
}, true, StoredList.TEMPORARY_LIST.id, listNameMemento);
} else {
if (type != CacheListType.OFFLINE) {
for (final Geocache geocache : caches) {
if (geocache.getListId() == StoredList.TEMPORARY_LIST.id) {
geocache.setListId(StoredList.STANDARD_LIST_ID);
}
}
}
refreshStoredInternal(caches);
}
}
private void refreshStoredInternal(final List<Geocache> caches) {
detailProgress.set(0);
showProgress(false);
final int etaTime = ((detailTotal * 25) / 60);
final String message;
if (etaTime < 1) {
message = res.getString(R.string.caches_downloading) + " " + res.getString(R.string.caches_eta_ltm);
} else {
message = res.getString(R.string.caches_downloading) + " " + res.getQuantityString(R.plurals.caches_eta_mins, etaTime, etaTime);
}
final LoadDetailsHandler loadDetailsHandler = new LoadDetailsHandler();
progress.show(this, null, message, ProgressDialog.STYLE_HORIZONTAL, loadDetailsHandler.cancelMessage());
progress.setMaxProgressAndReset(detailTotal);
detailProgressTime = System.currentTimeMillis();
loadDetails(loadDetailsHandler, caches);
}
public void removeFromHistoryCheck() {
final int message = (adapter != null && adapter.getCheckedCount() > 0) ? R.string.cache_remove_from_history
: R.string.cache_clear_history;
Dialogs.confirmYesNo(this, R.string.caches_removing_from_history, message, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
removeFromHistory();
dialog.cancel();
}
});
}
private void removeFromHistory() {
final List<Geocache> caches = adapter.getCheckedOrAllCaches();
final String[] geocodes = new String[caches.size()];
for (int i = 0; i < geocodes.length; i++) {
geocodes[i] = caches.get(i).getGeocode();
}
DataStore.clearVisitDate(geocodes);
refreshCurrentList();
}
private void importWeb() {
// menu is also shown with no device connected
if (!Settings.isRegisteredForSend2cgeo()) {
Dialogs.confirm(this, R.string.web_import_title, R.string.init_sendToCgeo_description, new OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int which) {
SettingsActivity.openForScreen(R.string.preference_screen_sendtocgeo, CacheListActivity.this);
}
});
return;
}
detailProgress.set(0);
showProgress(false);
final DownloadFromWebHandler downloadFromWebHandler = new DownloadFromWebHandler();
progress.show(this, null, res.getString(R.string.web_import_waiting), true, downloadFromWebHandler.cancelMessage());
Send2CgeoDownloader.loadFromWeb(downloadFromWebHandler, listId);
}
private void deleteCachesWithConfirmation() {
final int titleId = (adapter.getCheckedCount() > 0) ? R.string.caches_remove_selected : R.string.caches_remove_all;
final int count = adapter.getCheckedOrAllCount();
final String message = res.getQuantityString(adapter.getCheckedCount() > 0 ? R.plurals.caches_remove_selected_confirm : R.plurals.caches_remove_all_confirm, count, count);
Dialogs.confirmYesNo(this, titleId, message, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int id) {
deleteCachesInternal(adapter.getCheckedOrAllCaches());
dialog.cancel();
}
});
}
private void deleteCachesInternal(final @NonNull Collection<Geocache> caches) {
new DeleteCachesFromListCommand(this, caches).execute();
}
/**
* Method to asynchronously refresh the caches details.
*/
private void loadDetails(final CancellableHandler handler, final List<Geocache> caches) {
final Observable<Geocache> allCaches;
if (Settings.isStoreOfflineMaps()) {
final List<Geocache> withStaticMaps = new ArrayList<>(caches.size());
final List<Geocache> withoutStaticMaps = new ArrayList<>(caches.size());
for (final Geocache cache : caches) {
if (cache.hasStaticMap()) {
withStaticMaps.add(cache);
} else {
withoutStaticMaps.add(cache);
}
}
allCaches = Observable.concat(Observable.from(withoutStaticMaps), Observable.from(withStaticMaps));
} else {
allCaches = Observable.from(caches);
}
final Observable<Geocache> loaded = allCaches.flatMap(new Func1<Geocache, Observable<Geocache>>() {
@Override
public Observable<Geocache> call(final Geocache cache) {
return Observable.create(new OnSubscribe<Geocache>() {
@Override
public void call(final Subscriber<? super Geocache> subscriber) {
cache.refreshSynchronous(null);
detailProgress.incrementAndGet();
handler.obtainMessage(DownloadProgress.MSG_LOADED, cache).sendToTarget();
subscriber.onCompleted();
}
}).subscribeOn(AndroidRxUtils.refreshScheduler);
}
}).doOnCompleted(new Action0() {
@Override
public void call() {
handler.sendEmptyMessage(DownloadProgress.MSG_DONE);
}
});
handler.unsubscribeIfCancelled(loaded.subscribe());
}
private static final class DeleteCachesFromListCommand extends DeleteCachesCommand {
private final WeakReference<CacheListActivity> activityRef;
private final int lastListPosition;
public DeleteCachesFromListCommand(@NonNull final CacheListActivity context, final Collection<Geocache> caches) {
super(context, caches);
lastListPosition = context.getListView().getFirstVisiblePosition();
activityRef = new WeakReference<>(context);
}
@Override
public void onFinished() {
final CacheListActivity activity = activityRef.get();
if (activity != null) {
activity.adapter.setSelectMode(false);
activity.refreshCurrentList(AfterLoadAction.CHECK_IF_EMPTY);
activity.replaceCacheListFromSearch();
activity.getListView().setSelection(lastListPosition);
}
}
}
private static void clearOfflineLogs(final Handler handler, final List<Geocache> selectedCaches) {
Schedulers.io().createWorker().schedule(new Action0() {
@Override
public void call() {
DataStore.clearLogsOffline(selectedCaches);
handler.sendEmptyMessage(DownloadProgress.MSG_DONE);
}
});
}
private class MoreCachesListener implements View.OnClickListener {
@Override
public void onClick(final View arg0) {
showProgress(true);
showFooterLoadingCaches();
getSupportLoaderManager().restartLoader(CacheListLoaderType.NEXT_PAGE.getLoaderId(), null, CacheListActivity.this);
}
}
private void hideLoading() {
final ListView list = getListView();
if (list.getVisibility() == View.GONE) {
list.setVisibility(View.VISIBLE);
final View loading = findViewById(R.id.loading);
loading.setVisibility(View.GONE);
}
}
@NonNull
private Action1<Integer> getListSwitchingRunnable() {
return new Action1<Integer>() {
@Override
public void call(final Integer selectedListId) {
switchListById(selectedListId);
}
};
}
private void switchListById(final int id) {
switchListById(id, AfterLoadAction.NO_ACTION);
}
private void switchListById(final int id, @NonNull final AfterLoadAction action) {
if (id < 0) {
return;
}
final Bundle extras = new Bundle();
extras.putSerializable(BUNDLE_ACTION_KEY, action);
if (id == PseudoList.HISTORY_LIST.id) {
type = CacheListType.HISTORY;
getSupportLoaderManager().destroyLoader(CacheListType.OFFLINE.getLoaderId());
currentLoader = (AbstractSearchLoader) getSupportLoaderManager().restartLoader(CacheListType.HISTORY.getLoaderId(), extras, this);
} else {
if (id == PseudoList.ALL_LIST.id) {
listId = id;
title = res.getString(R.string.list_all_lists);
} else {
final StoredList list = DataStore.getList(id);
listId = list.id;
title = list.title;
}
type = CacheListType.OFFLINE;
getSupportLoaderManager().destroyLoader(CacheListType.HISTORY.getLoaderId());
extras.putAll(OfflineGeocacheListLoader.getBundleForList(listId));
currentLoader = (OfflineGeocacheListLoader) getSupportLoaderManager().restartLoader(CacheListType.OFFLINE.getLoaderId(), extras, this);
Settings.saveLastList(listId);
}
initAdapter();
showProgress(true);
showFooterLoadingCaches();
adapter.setSelectMode(false);
invalidateOptionsMenuCompatible();
}
private void renameList() {
(new RenameListCommand(this, listId) {
@Override
protected void onFinished() {
refreshCurrentList();
}
}).execute();
}
private void removeListInternal() {
new DeleteListCommand(this, listId) {
private String oldListName;
@Override
protected boolean canExecute() {
oldListName = DataStore.getList(listId).getTitle();
return super.canExecute();
}
@Override
protected void onFinished() {
refreshSpinnerAdapter();
switchListById(StoredList.STANDARD_LIST_ID);
}
@Override
protected void onFinishedUndo() {
refreshSpinnerAdapter();
for (final StoredList list : DataStore.getLists()) {
if (oldListName.equals(list.getTitle())) {
switchListById(list.id);
}
}
}
}.execute();
}
private void removeList() {
// if there are no caches on this list, don't bother the user with questions.
        // there is no harm in deleting the list; it can easily be recreated
if (CollectionUtils.isEmpty(cacheList)) {
removeListInternal();
return;
}
        // ask for confirmation if there are caches on the list
Dialogs.confirm(this, R.string.list_dialog_remove_title, R.string.list_dialog_remove_description, R.string.list_dialog_remove, new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialog, final int whichButton) {
removeListInternal();
}
});
}
public void goMap() {
if (!cacheToShow()) {
return;
}
// apply filter settings (if there's a filter)
final SearchResult searchToUse = getFilteredSearch();
CGeoMap.startActivitySearch(this, searchToUse, title);
}
private void refreshCurrentList() {
refreshCurrentList(AfterLoadAction.NO_ACTION);
}
private void refreshCurrentList(@NonNull final AfterLoadAction action) {
refreshSpinnerAdapter();
switchListById(listId, action);
}
public static void startActivityOffline(final Context context) {
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.OFFLINE);
context.startActivity(cachesIntent);
}
public static void startActivityOwner(final Activity context, final String userName) {
if (!isValidUsername(context, userName)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.OWNER);
cachesIntent.putExtra(Intents.EXTRA_USERNAME, userName);
context.startActivity(cachesIntent);
}
private static boolean isValidUsername(final Activity context, final String username) {
if (StringUtils.isBlank(username)) {
ActivityMixin.showToast(context, R.string.warn_no_username);
return false;
}
return true;
}
public static void startActivityFinder(final Activity context, final String userName) {
if (!isValidUsername(context, userName)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.FINDER);
cachesIntent.putExtra(Intents.EXTRA_USERNAME, userName);
context.startActivity(cachesIntent);
}
private void prepareFilterBar() {
if (Settings.getCacheType() != CacheType.ALL || adapter.isFiltered()) {
final StringBuilder output = new StringBuilder(Settings.getCacheType().getL10n());
if (adapter.isFiltered()) {
output.append(", ").append(adapter.getFilterName());
}
final TextView filterTextView = ButterKnife.findById(this, R.id.filter_text);
filterTextView.setText(output.toString());
findViewById(R.id.filter_bar).setVisibility(View.VISIBLE);
}
else {
findViewById(R.id.filter_bar).setVisibility(View.GONE);
}
}
public static Intent getNearestIntent(final Activity context) {
return Intents.putListType(new Intent(context, CacheListActivity.class), CacheListType.NEAREST);
}
public static Intent getHistoryIntent(final Context context) {
return Intents.putListType(new Intent(context, CacheListActivity.class), CacheListType.HISTORY);
}
public static void startActivityAddress(final Context context, final Geopoint coords, final String address) {
final Intent addressIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(addressIntent, CacheListType.ADDRESS);
addressIntent.putExtra(Intents.EXTRA_COORDS, coords);
addressIntent.putExtra(Intents.EXTRA_ADDRESS, address);
context.startActivity(addressIntent);
}
/**
* start list activity, by searching around the given point.
*
* @param name
* name of coordinates, will lead to a title like "Around ..." instead of directly showing the
* coordinates as title
*/
public static void startActivityCoordinates(final AbstractActivity context, final Geopoint coords, @Nullable final String name) {
if (!isValidCoords(context, coords)) {
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.COORDINATE);
cachesIntent.putExtra(Intents.EXTRA_COORDS, coords);
if (StringUtils.isNotEmpty(name)) {
cachesIntent.putExtra(Intents.EXTRA_TITLE, context.getString(R.string.around, name));
}
context.startActivity(cachesIntent);
}
private static boolean isValidCoords(final AbstractActivity context, final Geopoint coords) {
if (coords == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_no_coordinates));
return false;
}
return true;
}
public static void startActivityKeyword(final AbstractActivity context, final String keyword) {
if (keyword == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_no_keyword));
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.KEYWORD);
cachesIntent.putExtra(Intents.EXTRA_KEYWORD, keyword);
context.startActivity(cachesIntent);
}
public static void startActivityMap(final Context context, final SearchResult search) {
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
cachesIntent.putExtra(Intents.EXTRA_SEARCH, search);
Intents.putListType(cachesIntent, CacheListType.MAP);
context.startActivity(cachesIntent);
}
public static void startActivityPocket(final AbstractActivity context, final @NonNull PocketQueryList pq) {
final String guid = pq.getGuid();
if (guid == null) {
context.showToast(CgeoApplication.getInstance().getString(R.string.warn_pocket_query_select));
return;
}
final Intent cachesIntent = new Intent(context, CacheListActivity.class);
Intents.putListType(cachesIntent, CacheListType.POCKET);
cachesIntent.putExtra(Intents.EXTRA_NAME, pq.getName());
cachesIntent.putExtra(Intents.EXTRA_POCKET_GUID, guid);
context.startActivity(cachesIntent);
}
// Loaders
@Override
public Loader<SearchResult> onCreateLoader(final int type, final Bundle extras) {
if (type >= CacheListLoaderType.values().length) {
throw new IllegalArgumentException("invalid loader type " + type);
}
final CacheListLoaderType enumType = CacheListLoaderType.values()[type];
AbstractSearchLoader loader = null;
switch (enumType) {
case OFFLINE:
// open either the requested or the last list
if (extras.containsKey(Intents.EXTRA_LIST_ID)) {
listId = extras.getInt(Intents.EXTRA_LIST_ID);
} else {
listId = Settings.getLastList();
}
if (listId == PseudoList.ALL_LIST.id) {
title = res.getString(R.string.list_all_lists);
} else if (listId <= StoredList.TEMPORARY_LIST.id) {
listId = StoredList.STANDARD_LIST_ID;
title = res.getString(R.string.stored_caches_button);
} else {
final StoredList list = DataStore.getList(listId);
// list.id may be different if listId was not valid
if (list.id != listId) {
showToast(getString(R.string.list_not_available));
}
listId = list.id;
title = list.title;
}
loader = new OfflineGeocacheListLoader(getBaseContext(), coords, listId);
break;
case HISTORY:
title = res.getString(R.string.caches_history);
listId = PseudoList.HISTORY_LIST.id;
loader = new HistoryGeocacheListLoader(app, coords);
break;
case NEAREST:
title = res.getString(R.string.caches_nearby);
loader = new CoordsGeocacheListLoader(app, coords);
break;
case COORDINATE:
title = coords.toString();
loader = new CoordsGeocacheListLoader(app, coords);
break;
case KEYWORD:
final String keyword = extras.getString(Intents.EXTRA_KEYWORD);
title = listNameMemento.rememberTerm(keyword);
loader = new KeywordGeocacheListLoader(app, keyword);
break;
case ADDRESS:
final String address = extras.getString(Intents.EXTRA_ADDRESS);
if (StringUtils.isNotBlank(address)) {
title = listNameMemento.rememberTerm(address);
} else {
title = coords.toString();
}
loader = new CoordsGeocacheListLoader(app, coords);
break;
case FINDER:
final String username = extras.getString(Intents.EXTRA_USERNAME);
title = listNameMemento.rememberTerm(username);
loader = new FinderGeocacheListLoader(app, username);
break;
case OWNER:
final String ownerName = extras.getString(Intents.EXTRA_USERNAME);
title = listNameMemento.rememberTerm(ownerName);
loader = new OwnerGeocacheListLoader(app, ownerName);
break;
case MAP:
//TODO Build Null loader
title = res.getString(R.string.map_map);
search = (SearchResult) extras.get(Intents.EXTRA_SEARCH);
replaceCacheListFromSearch();
loadCachesHandler.sendMessage(Message.obtain());
break;
case NEXT_PAGE:
loader = new NextPageGeocacheListLoader(app, search);
break;
case POCKET:
final String guid = extras.getString(Intents.EXTRA_POCKET_GUID);
title = extras.getString(Intents.EXTRA_NAME);
loader = new PocketGeocacheListLoader(app, guid);
break;
}
// if there is a title given in the activity start request, use this one instead of the default
if (extras != null && StringUtils.isNotBlank(extras.getString(Intents.EXTRA_TITLE))) {
title = extras.getString(Intents.EXTRA_TITLE);
}
if (loader != null && extras != null && extras.getSerializable(BUNDLE_ACTION_KEY) != null) {
final AfterLoadAction action = (AfterLoadAction) extras.getSerializable(BUNDLE_ACTION_KEY);
loader.setAfterLoadAction(action);
}
updateTitle();
showProgress(true);
showFooterLoadingCaches();
if (loader != null) {
loader.setRecaptchaHandler(new RecaptchaHandler(this, loader));
}
return loader;
}
@Override
public void onLoadFinished(final Loader<SearchResult> arg0, final SearchResult searchIn) {
// The database search was moved into the UI call intentionally. If this is done before the runOnUIThread,
// then we have 2 sets of caches in memory. This can lead to OOM for huge cache lists.
if (searchIn != null) {
cacheList.clear();
final Set<Geocache> cachesFromSearchResult = searchIn.getCachesFromSearchResult(LoadFlags.LOAD_CACHE_OR_DB);
cacheList.addAll(cachesFromSearchResult);
search = searchIn;
updateAdapter();
updateTitle();
showFooterMoreCaches();
}
showProgress(false);
hideLoading();
invalidateOptionsMenuCompatible();
if (arg0 instanceof AbstractSearchLoader) {
switch (((AbstractSearchLoader) arg0).getAfterLoadAction()) {
case CHECK_IF_EMPTY:
checkIfEmptyAndRemoveAfterConfirm();
break;
case NO_ACTION:
break;
}
}
}
@Override
public void onLoaderReset(final Loader<SearchResult> arg0) {
//Not interesting
}
/**
* Allow the title bar spinner to show the same subtitle like the activity itself would show.
*
*/
public CharSequence getCacheListSubtitle(@NonNull final AbstractList list) {
// if this is the current list, be aware of filtering
if (list.id == listId) {
return getCurrentSubtitle();
}
// otherwise return the overall number
final int numberOfCaches = list.getNumberOfCaches();
if (numberOfCaches < 0) {
return StringUtils.EMPTY;
}
return getCacheNumberString(getResources(), numberOfCaches);
}
/**
* Calculate the subtitle of the current list depending on (optional) filters.
*
*/
private CharSequence getCurrentSubtitle() {
if (search == null) {
return getCacheNumberString(getResources(), 0);
}
final StringBuilder result = new StringBuilder();
if (adapter.isFiltered()) {
result.append(adapter.getCount()).append('/');
}
result.append(getCacheNumberString(getResources(), search.getCount()));
return result.toString();
}
@Override
public ShowcaseViewBuilder getShowcase() {
if (mCacheListSpinnerAdapter != null) {
return new ShowcaseViewBuilder(this)
.setTarget(new ActionViewTarget(this, Type.SPINNER))
.setContent(R.string.showcase_cachelist_title, R.string.showcase_cachelist_text);
}
return null;
}
/**
* Used to indicate if an action should be taken after the AbstractSearchLoader has finished
*/
public enum AfterLoadAction {
/** Take no action */
NO_ACTION,
/** Check if the list is empty and prompt for deletion */
CHECK_IF_EMPTY
}
}
| Fix #5312: Count caches in Datastore
Count the caches directly in the database to avoid problems with
filtered lists.
| main/src/cgeo/geocaching/CacheListActivity.java | Fix #5312: Count caches in Datastore | <ide><path>ain/src/cgeo/geocaching/CacheListActivity.java
<ide>
<ide> private void checkIfEmptyAndRemoveAfterConfirm() {
<ide> final boolean isNonDefaultList = isConcreteList() && listId != StoredList.STANDARD_LIST_ID;
<del> if (isNonDefaultList && CollectionUtils.isEmpty(cacheList)) {
<add> // Check local cacheList first, and Datastore only if needed (because of filtered lists)
<add> // Checking is done in this order for performance reasons
<add> if (isNonDefaultList && CollectionUtils.isEmpty(cacheList)
<add> && DataStore.getAllStoredCachesCount(CacheType.ALL, listId) == 0) {
<ide> // ask user, if he wants to delete the now empty list
<ide> Dialogs.confirmYesNo(this, R.string.list_dialog_remove_title, R.string.list_dialog_remove_nowempty, new DialogInterface.OnClickListener() {
<ide> @Override |
|
Java | mit | 45ce9f9053826298c56ea93a494cbce5b00d3dc5 | 0 | ctisutep/trimmer | package trimmer;
import java.awt.EventQueue;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JProgressBar;
import javax.swing.JButton;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.Color;
import java.awt.SystemColor;
import javax.swing.UIManager;
import javax.swing.JSeparator;
import javax.swing.JLabel;
import java.awt.Font;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import javax.swing.SwingConstants;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.swing.AbstractAction;
import javax.swing.JTextArea;
import javax.swing.JMenuBar;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.SpinnerNumberModel;
import javax.swing.SwingWorker.StateValue;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JSpinner;
public class Frontend {
private JFrame frame;
int padding = 0;
int skip = 1;
JButton fincvgfile;
JButton dirBtn;
JButton strBtn;
JLabel prgLbl;
JTextArea textArea = new JTextArea();
JSpinner spinner;
JSpinner spinner2;
JButton resBtn = new JButton("View Results");
JCheckBoxMenuItem chckbxmntmMaximal;
JCheckBoxMenuItem chckbxmntmDynamic;
TrimmerDynamic sub2;
TrimmerMinMax sub1;
int progress = 0;
String[] country;
File outputDir;
BufferedReader br;
File[] files;
JButton stopButton;
private String[][] array;
/**
* Launch the application.
*/
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
//set the main window
Frontend window = new Frontend();
window.frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the application.
*/
public Frontend() {
initialize();
}
/**
* Initialize the contents of the frame.
* @throws IOException
*/
public static void set(JTextArea a, File file) throws IOException
{
		//TODO: remove this method; it appears to be unused
String things = "";
String curr = "";
String line = "";
String cvsSplitBy = ",";
String[] b = null;
BufferedReader br = null;
PrintWriter writer = new PrintWriter("the-file-name.txt");
br = new BufferedReader(new FileReader(file));
while ((curr = br.readLine()) != null)
{
writer.println(curr);
// use comma as separator
b = curr.split(cvsSplitBy);
for (int i = 0; i < b.length; i++)
{
if (i % 2 == 1)
{
line += b[i] + " ";
}
else
{
things += b[i] + " ";
}
a.setText(things + "\n" + line);
}
}
writer.close();
br.close();
}
public static String[] text(File file) throws IOException, FileNotFoundException
{
		//TODO: this method also appears to be unused
String line = "";
String cvsSplitBy = ",";
String[] country = null;
BufferedReader br = null;
br = new BufferedReader(new FileReader(file));
while ((line = br.readLine()) != null)
{
// use comma as separator
country = line.split(cvsSplitBy);
}
br.close();
return (country);
}
private void initialize() {
		//this method builds the main window and wires up its controls
frame = new JFrame();
//the size is fixed for now
frame.setBounds(100, 100, 315, 488);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(null);
final JProgressBar progressBar = new JProgressBar();
		//must be final because it is captured by the anonymous inner listeners below (required before Java 8)
progressBar.setForeground(new Color(0, 128, 128));
progressBar.setBounds(10, 320, 279, 42);
//start the progress at 0
progressBar.setValue(0);
frame.getContentPane().add(progressBar);
//create the button to select the file
fincvgfile = new JButton("Select Final Coverage File/s");
fincvgfile.setBackground(SystemColor.textHighlightText);
fincvgfile.setForeground(Color.BLACK);
//set the action of the button when it is pressed
fincvgfile.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
				//this button only works if a method type has been selected (maximal or dynamic)
				//TODO: add a warning if it is clicked without selecting a method
if (chckbxmntmDynamic.isSelected() || chckbxmntmMaximal.isSelected())
{
progressBar.setValue(10);
JFileChooser chooser = new JFileChooser();
//can select multiple files
chooser.setMultiSelectionEnabled(true);
					//accepts CSV and DAT files
FileNameExtensionFilter filter = new FileNameExtensionFilter(
"CSV", "CSV", "dat");
chooser.setFileFilter(filter);
int returnVal = chooser.showOpenDialog(fincvgfile);
if (returnVal == chooser.APPROVE_OPTION)
{
//all files are stored in an array of files
files = chooser.getSelectedFiles();
//once files are selected, you can select the output directory
dirBtn.setEnabled(true);
}
}
}
});
fincvgfile.setBounds(45, 25, 196, 50);
frame.getContentPane().add(fincvgfile);
//output button starts out disabled
dirBtn = new JButton("Select Output Folder");
dirBtn.setEnabled(false);
dirBtn.setBackground(SystemColor.textHighlightText);
dirBtn.setForeground(Color.BLACK);
dirBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
				//Once it is clicked, update the progress bar
progressBar.setValue(30);
				//select the output directory, starting in the directory of the original file
JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(files[0]);
chooser.setDialogTitle("Output");
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
//
// disable the "All files" option.
//
chooser.setAcceptAllFileFilterUsed(false);
//
if (chooser.showOpenDialog(dirBtn) == JFileChooser.APPROVE_OPTION) {
//once it is selected we are ready to start
outputDir = chooser.getSelectedFile();
strBtn.setEnabled(true);
}
else {
System.out.println("No Selection ");
}
}
});
dirBtn.setBounds(45, 93, 196, 50);
frame.getContentPane().add(dirBtn);
//start button also starts out disabled
strBtn = new JButton("Start");
strBtn.setEnabled(false);
strBtn.setBackground(SystemColor.textHighlightText);
strBtn.setForeground(Color.BLACK);
strBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
//disable all other buttons once the process has started
fincvgfile.setEnabled(false);
dirBtn.setEnabled(false);
strBtn.setEnabled(false);
progress = 40;
//number of spaces before and after a selection
padding = ((Double) (spinner.getValue())).intValue();
//skip is the number of lines at the beginning that don't contain data
skip = ((Double) (spinner2.getValue())).intValue();
progressBar.setValue(progress);
//add a reset button to restart or cancel the progress
resBtn.setEnabled(true);
//detect which of the two methods will be used.
if (chckbxmntmMaximal.isSelected())
{
try
{
/*
					 * Both methods are objects of a SwingWorker subclass that does the work on another
					 * thread, so the main GUI doesn't hang or get stuck. This class also has methods
					 * to report live progress and to cancel the operation.
*/
sub1 = new TrimmerMinMax(files, outputDir, prgLbl);
					//execute() runs doInBackground() on a background worker thread
sub1.execute();
					//this listener reacts to the progress/state property changes fired by the worker
sub1.addPropertyChangeListener(new PropertyChangeListener()
{
@Override
public void propertyChange(final PropertyChangeEvent event)
{
switch (event.getPropertyName())
{
case "progress":
progressBar.setIndeterminate(false);
//progressBar.setValue(40 + ((Integer)(event.getNewValue()) ) / 60);
break;
case "state":
switch ((StateValue) event.getNewValue())
{
case DONE:
progressBar.setValue(100);
stopButton.setVisible(true);
break;
case STARTED:
case PENDING:
progressBar.setVisible(true);
progressBar.setIndeterminate(true);
break;
}
break;
}
}
});
}
catch (IOException e1)
{
e1.printStackTrace();
}
}
			//does the same as above, but using the dynamic method
else if (chckbxmntmDynamic.isSelected())
{
try {
sub2 = new TrimmerDynamic(files, outputDir, prgLbl, padding, skip);
sub2.execute();
sub2.addPropertyChangeListener(new PropertyChangeListener()
{
@Override
public void propertyChange(final PropertyChangeEvent event)
{
switch (event.getPropertyName())
{
case "progress":
progressBar.setIndeterminate(false);
//progressBar.setValue(40 + ((Integer)(event.getNewValue()) ) / 60);
break;
case "state":
switch ((StateValue) event.getNewValue())
{
case DONE:
progressBar.setValue(100);
stopButton.setVisible(true);
break;
case STARTED:
case PENDING:
progressBar.setVisible(true);
progressBar.setIndeterminate(true);
break;
}
break;
}
}
});
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
});
strBtn.setBounds(45, 154, 196, 50);
frame.getContentPane().add(strBtn);
JSeparator separator = new JSeparator();
separator.setBounds(45, 308, 200, 14);
frame.getContentPane().add(separator);
JSeparator separator_1 = new JSeparator();
separator_1.setBounds(45, 215, 200, 14);
frame.getContentPane().add(separator_1);
//this label is passed to the other classes to change the text while the file is being processed
prgLbl = new JLabel("...");
prgLbl.setHorizontalAlignment(SwingConstants.CENTER);
prgLbl.setFont(new Font("Tahoma", Font.PLAIN, 11));
prgLbl.setBounds(45, 234, 196, 63);
frame.getContentPane().add(prgLbl);
stopButton = new JButton(new AbstractAction("Reset") {
@Override
public void actionPerformed(ActionEvent arg0) {
fincvgfile.setEnabled(true);
progressBar.setValue(0);
prgLbl.setText("...");
stopButton.setVisible(false);
files = null;
//closeFile cancels the operation if it is still in progress
if (chckbxmntmMaximal.isSelected())
{
sub1.closeFile();
}
else
sub2.closeFile();
chckbxmntmMaximal.setSelected(false);
chckbxmntmDynamic.setSelected(false);
}
});
		//the Reset button is hidden initially and shown once a run completes (see the DONE cases above)
stopButton.setBounds(103, 381, 89, 23);
stopButton.setVisible(false);
frame.getContentPane().add(stopButton);
JMenuBar menuBar = new JMenuBar();
frame.setJMenuBar(menuBar);
JMenu mnNewMenu = new JMenu("Method");
menuBar.add(mnNewMenu);
chckbxmntmMaximal = new JCheckBoxMenuItem("Maximal");
mnNewMenu.add(chckbxmntmMaximal);
chckbxmntmDynamic = new JCheckBoxMenuItem("Dynamic");
mnNewMenu.add(chckbxmntmDynamic);
//number of files doesn't actually need to be specified
//TODO: remove this or change it to another function
JLabel lblFiles = new JLabel("# of Files");
menuBar.add(lblFiles);
SpinnerNumberModel model1 = new SpinnerNumberModel(1.0, 1.0, 10.0, 1.0);
spinner = new JSpinner(model1);
menuBar.add(spinner);
JLabel lblSkip = new JLabel("Every n Cycle");
menuBar.add(lblSkip);
SpinnerNumberModel model2 = new SpinnerNumberModel(1.0, 1.0, 500.0, 1.0);
spinner2 = new JSpinner(model2);
menuBar.add(spinner2);
//set the checkboxes to act as radio buttons
chckbxmntmDynamic.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
if (chckbxmntmMaximal.isSelected())
{
chckbxmntmMaximal.setSelected(false);
}
}
});
chckbxmntmMaximal.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
if (chckbxmntmDynamic.isSelected())
{
chckbxmntmDynamic.setSelected(false);
}
}
});
}
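	/*
	 * Illustrative sketch (not part of the original Trimmer sources): the comments in
	 * initialize() describe TrimmerMinMax and TrimmerDynamic as SwingWorker subclasses that
	 * run on a background thread, report progress and can be cancelled. The minimal class
	 * below only demonstrates that pattern; its name and the placeholder loop are
	 * assumptions, not the real trimming logic. Usage would mirror sub1/sub2:
	 * new ExampleTrimmerWorker().execute(), plus addPropertyChangeListener for progress.
	 */
	private static class ExampleTrimmerWorker extends javax.swing.SwingWorker<Void, Integer> {
		@Override
		protected Void doInBackground() throws Exception {
			// Placeholder for the real per-file trimming work.
			for (int i = 0; i <= 100 && !isCancelled(); i++) {
				setProgress(i); // fires "progress" PropertyChangeEvents, like the listeners above expect
				Thread.sleep(10);
			}
			return null;
		}
		@Override
		protected void done() {
			// Runs on the Event Dispatch Thread once the background work finishes or is cancelled.
		}
	}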
}
| Trimmer/src/trimmer/Frontend.java | package trimmer;
import java.awt.EventQueue;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JProgressBar;
import javax.swing.JButton;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.Color;
import java.awt.SystemColor;
import javax.swing.UIManager;
import javax.swing.JSeparator;
import javax.swing.JLabel;
import java.awt.Font;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Reader;
import javax.swing.SwingConstants;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.swing.AbstractAction;
import javax.swing.JTextArea;
import javax.swing.JMenuBar;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.SpinnerNumberModel;
import javax.swing.SwingWorker.StateValue;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JSpinner;
public class Frontend {
private JFrame frame;
int padding = 0;
int skip = 1;
JButton fincvgfile;
JButton dirBtn;
JButton strBtn;
JLabel prgLbl;
JTextArea textArea = new JTextArea();
JSpinner spinner;
JSpinner spinner2;
JButton resBtn = new JButton("View Results");
JCheckBoxMenuItem chckbxmntmMaximal;
JCheckBoxMenuItem chckbxmntmDynamic;
TrimmerDynamic sub2;
TrimmerMinMax sub1;
int progress = 0;
String[] country;
File outputDir;
BufferedReader br;
File[] files;
JButton stopButton;
private String[][] array;
/**
* Launch the application.
*/
public static void main(String[] args) {
EventQueue.invokeLater(new Runnable() {
public void run() {
try {
Frontend window = new Frontend();
window.frame.setVisible(true);
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
/**
* Create the application.
*/
public Frontend() {
initialize();
}
/**
* Initialize the contents of the frame.
* @throws IOException
*/
public static void set(JTextArea a, File file) throws IOException
{
String things = "";
String curr = "";
String line = "";
String cvsSplitBy = ",";
String[] b = null;
BufferedReader br = null;
PrintWriter writer = new PrintWriter("the-file-name.txt");
br = new BufferedReader(new FileReader(file));
while ((curr = br.readLine()) != null)
{
writer.println(curr);
// use comma as separator
b = curr.split(cvsSplitBy);
for (int i = 0; i < b.length; i++)
{
if (i % 2 == 1)
{
line += b[i] + " ";
}
else
{
things += b[i] + " ";
}
a.setText(things + "\n" + line);
}
}
writer.close();
br.close();
}
public static String[] text(File file) throws IOException, FileNotFoundException
{
String line = "";
String cvsSplitBy = ",";
String[] country = null;
BufferedReader br = null;
br = new BufferedReader(new FileReader(file));
while ((line = br.readLine()) != null)
{
// use comma as separator
country = line.split(cvsSplitBy);
}
br.close();
return (country);
}
private void initialize() {
frame = new JFrame();
frame.setBounds(100, 100, 315, 488);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(null);
JProgressBar progressBar = new JProgressBar();
progressBar.setForeground(new Color(0, 128, 128));
progressBar.setBounds(10, 320, 279, 42);
progressBar.setValue(0);
frame.getContentPane().add(progressBar);
fincvgfile = new JButton("Select Final Coverage File/s");
fincvgfile.setBackground(SystemColor.textHighlightText);
fincvgfile.setForeground(Color.BLACK);
fincvgfile.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
if (chckbxmntmDynamic.isSelected() || chckbxmntmMaximal.isSelected())
{
progressBar.setValue(10);
JFileChooser chooser = new JFileChooser();
chooser.setMultiSelectionEnabled(true);
FileNameExtensionFilter filter = new FileNameExtensionFilter(
"CSV", "CSV", "dat");
chooser.setFileFilter(filter);
int returnVal = chooser.showOpenDialog(fincvgfile);
if (returnVal == chooser.APPROVE_OPTION)
{
files = chooser.getSelectedFiles();
dirBtn.setEnabled(true);
}
}
}
});
fincvgfile.setBounds(45, 25, 196, 50);
frame.getContentPane().add(fincvgfile);
dirBtn = new JButton("Select Output Folder");
dirBtn.setEnabled(false);
dirBtn.setBackground(SystemColor.textHighlightText);
dirBtn.setForeground(Color.BLACK);
dirBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
strBtn.setEnabled(true);
progressBar.setValue(30);
progress = 30;
JFileChooser chooser = new JFileChooser();
chooser.setCurrentDirectory(files[0]);
chooser.setDialogTitle("Output");
chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
//
// disable the "All files" option.
//
chooser.setAcceptAllFileFilterUsed(false);
//
if (chooser.showOpenDialog(dirBtn) == JFileChooser.APPROVE_OPTION) {
outputDir = chooser.getSelectedFile();
}
else {
System.out.println("No Selection ");
}
}
});
dirBtn.setBounds(45, 93, 196, 50);
frame.getContentPane().add(dirBtn);
strBtn = new JButton("Start");
strBtn.setEnabled(false);
strBtn.setBackground(SystemColor.textHighlightText);
strBtn.setForeground(Color.BLACK);
strBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
fincvgfile.setEnabled(false);
dirBtn.setEnabled(false);
strBtn.setEnabled(false);
progress = 40;
padding = ((Double) (spinner.getValue())).intValue();
//number of spaces before and after a selection
skip = ((Double) (spinner2.getValue())).intValue();
progressBar.setValue(progress);
resBtn.setEnabled(true);
if (chckbxmntmMaximal.isSelected())
{
try
{
sub1 = new TrimmerMinMax(files, outputDir, prgLbl);
sub1.execute();
sub1.addPropertyChangeListener(new PropertyChangeListener()
{
@Override
public void propertyChange(final PropertyChangeEvent event)
{
switch (event.getPropertyName())
{
case "progress":
progressBar.setIndeterminate(false);
//progressBar.setValue(40 + ((Integer)(event.getNewValue()) ) / 60);
break;
case "state":
switch ((StateValue) event.getNewValue())
{
case DONE:
progressBar.setValue(100);
stopButton.setVisible(true);
break;
case STARTED:
case PENDING:
progressBar.setVisible(true);
progressBar.setIndeterminate(true);
break;
}
break;
}
}
});
}
catch (IOException e1)
{
e1.printStackTrace();
}
}
else if (chckbxmntmDynamic.isSelected())
{
try {
sub2 = new TrimmerDynamic(files, outputDir, prgLbl, padding, skip);
sub2.execute();
sub2.addPropertyChangeListener(new PropertyChangeListener()
{
@Override
public void propertyChange(final PropertyChangeEvent event)
{
switch (event.getPropertyName())
{
case "progress":
progressBar.setIndeterminate(false);
//progressBar.setValue(40 + ((Integer)(event.getNewValue()) ) / 60);
break;
case "state":
switch ((StateValue) event.getNewValue())
{
case DONE:
progressBar.setValue(100);
stopButton.setVisible(true);
break;
case STARTED:
case PENDING:
progressBar.setVisible(true);
progressBar.setIndeterminate(true);
break;
}
break;
}
}
});
} catch (IOException e1) {
e1.printStackTrace();
}
}
}
});
strBtn.setBounds(45, 154, 196, 50);
frame.getContentPane().add(strBtn);
JSeparator separator = new JSeparator();
separator.setBounds(45, 308, 200, 14);
frame.getContentPane().add(separator);
JSeparator separator_1 = new JSeparator();
separator_1.setBounds(45, 215, 200, 14);
frame.getContentPane().add(separator_1);
prgLbl = new JLabel("...");
prgLbl.setHorizontalAlignment(SwingConstants.CENTER);
prgLbl.setFont(new Font("Tahoma", Font.PLAIN, 11));
prgLbl.setBounds(45, 234, 196, 63);
frame.getContentPane().add(prgLbl);
stopButton = new JButton(new AbstractAction("Reset") {
@Override
public void actionPerformed(ActionEvent arg0) {
fincvgfile.setEnabled(true);
progressBar.setValue(0);
prgLbl.setText("...");
stopButton.setVisible(false);
files = null;
if (chckbxmntmMaximal.isSelected())
{
sub1.closeFile();
}
else
sub2.closeFile();
chckbxmntmMaximal.setSelected(false);
chckbxmntmDynamic.setSelected(false);
}
});
stopButton.setBounds(103, 381, 89, 23);
stopButton.setVisible(false);
frame.getContentPane().add(stopButton);
JMenuBar menuBar = new JMenuBar();
frame.setJMenuBar(menuBar);
JMenu mnNewMenu = new JMenu("Method");
menuBar.add(mnNewMenu);
chckbxmntmMaximal = new JCheckBoxMenuItem("Maximal");
mnNewMenu.add(chckbxmntmMaximal);
chckbxmntmDynamic = new JCheckBoxMenuItem("Dynamic");
mnNewMenu.add(chckbxmntmDynamic);
JLabel lblFiles = new JLabel("# of Files");
menuBar.add(lblFiles);
SpinnerNumberModel model1 = new SpinnerNumberModel(1.0, 1.0, 10.0, 1.0);
spinner = new JSpinner(model1);
menuBar.add(spinner);
JLabel lblSkip = new JLabel("Every n Cycle");
menuBar.add(lblSkip);
SpinnerNumberModel model2 = new SpinnerNumberModel(1.0, 1.0, 500.0, 1.0);
spinner2 = new JSpinner(model2);
menuBar.add(spinner2);
chckbxmntmDynamic.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
if (chckbxmntmMaximal.isSelected())
{
chckbxmntmMaximal.setSelected(false);
}
}
});
chckbxmntmMaximal.addActionListener(new ActionListener(){
public void actionPerformed(ActionEvent ae){
if (chckbxmntmDynamic.isSelected())
{
chckbxmntmDynamic.setSelected(false);
}
}
});
}
}
| Added comments to frontend | Trimmer/src/trimmer/Frontend.java | Added comments to frontend | <ide><path>rimmer/src/trimmer/Frontend.java
<ide> EventQueue.invokeLater(new Runnable() {
<ide> public void run() {
<ide> try {
<add> //set the main window
<ide> Frontend window = new Frontend();
<ide> window.frame.setVisible(true);
<ide> } catch (Exception e) {
<ide> */
<ide> public static void set(JTextArea a, File file) throws IOException
<ide> {
<add> //TODO: erase this method, I think it isn't used
<ide> String things = "";
<ide> String curr = "";
<ide> String line = "";
<ide> }
<ide> public static String[] text(File file) throws IOException, FileNotFoundException
<ide> {
<add> //TODO: also not used method
<ide> String line = "";
<ide> String cvsSplitBy = ",";
<ide> String[] country = null;
<ide> return (country);
<ide> }
<ide> private void initialize() {
<add> //this method is called to start the main window.
<ide> frame = new JFrame();
<add> //the size is fixed for now
<ide> frame.setBounds(100, 100, 315, 488);
<ide> frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
<ide> frame.getContentPane().setLayout(null);
<ide>
<del> JProgressBar progressBar = new JProgressBar();
<add> final JProgressBar progressBar = new JProgressBar();
<add> //for some reason this needs to be final on some java versions
<ide> progressBar.setForeground(new Color(0, 128, 128));
<ide> progressBar.setBounds(10, 320, 279, 42);
<add> //start the progress at 0
<ide> progressBar.setValue(0);
<ide> frame.getContentPane().add(progressBar);
<ide>
<del>
<add> //create the button to select the file
<ide> fincvgfile = new JButton("Select Final Coverage File/s");
<ide> fincvgfile.setBackground(SystemColor.textHighlightText);
<ide> fincvgfile.setForeground(Color.BLACK);
<add> //set the action of the button when it is pressed
<ide> fincvgfile.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent arg0) {
<add> //this button only works if you have selected a method type. (max or dynamic)
<add> //TODO: add a warning if it is clicked without selecting method
<ide> if (chckbxmntmDynamic.isSelected() || chckbxmntmMaximal.isSelected())
<ide> {
<ide> progressBar.setValue(10);
<ide> JFileChooser chooser = new JFileChooser();
<add> //can select multiple files
<ide> chooser.setMultiSelectionEnabled(true);
<add> //accepts csv's and dat files
<ide> FileNameExtensionFilter filter = new FileNameExtensionFilter(
<ide> "CSV", "CSV", "dat");
<ide> chooser.setFileFilter(filter);
<ide> int returnVal = chooser.showOpenDialog(fincvgfile);
<ide> if (returnVal == chooser.APPROVE_OPTION)
<ide> {
<add> //all files are stored in an array of files
<ide> files = chooser.getSelectedFiles();
<add> //once files are selected, you can select the output directory
<ide> dirBtn.setEnabled(true);
<ide> }
<ide> }
<ide> });
<ide> fincvgfile.setBounds(45, 25, 196, 50);
<ide> frame.getContentPane().add(fincvgfile);
<del>
<add>
<add> //output button starts out disabled
<ide> dirBtn = new JButton("Select Output Folder");
<ide> dirBtn.setEnabled(false);
<ide> dirBtn.setBackground(SystemColor.textHighlightText);
<ide> dirBtn.setForeground(Color.BLACK);
<ide> dirBtn.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent e) {
<del> strBtn.setEnabled(true);
<add> //Once it is clickd, update progress
<ide> progressBar.setValue(30);
<del> progress = 30;
<add> //select the directory of the output, starts in the directory of the original file
<ide> JFileChooser chooser = new JFileChooser();
<ide> chooser.setCurrentDirectory(files[0]);
<ide> chooser.setDialogTitle("Output");
<ide> chooser.setAcceptAllFileFilterUsed(false);
<ide> //
<ide> if (chooser.showOpenDialog(dirBtn) == JFileChooser.APPROVE_OPTION) {
<add> //once it is selected we are ready to start
<ide> outputDir = chooser.getSelectedFile();
<add> strBtn.setEnabled(true);
<ide> }
<ide> else {
<ide> System.out.println("No Selection ");
<ide> });
<ide> dirBtn.setBounds(45, 93, 196, 50);
<ide> frame.getContentPane().add(dirBtn);
<del>
<add>
<add> //start button also starts out disabled
<ide> strBtn = new JButton("Start");
<ide> strBtn.setEnabled(false);
<ide> strBtn.setBackground(SystemColor.textHighlightText);
<ide> strBtn.setForeground(Color.BLACK);
<ide> strBtn.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent e) {
<add> //disable all other buttons once the process has started
<ide> fincvgfile.setEnabled(false);
<ide> dirBtn.setEnabled(false);
<ide> strBtn.setEnabled(false);
<ide> progress = 40;
<add> //number of spaces before and after a selection
<ide> padding = ((Double) (spinner.getValue())).intValue();
<del> //number of spaces before and after a selection
<add> //skip is the number of lines at the beginning that don't contain data
<ide> skip = ((Double) (spinner2.getValue())).intValue();
<ide> progressBar.setValue(progress);
<add> //add a reset button to restart or cancel the progress
<ide> resBtn.setEnabled(true);
<add> //detect which of the two methods will be used.
<ide> if (chckbxmntmMaximal.isSelected())
<ide> {
<ide> try
<ide> {
<add> /*
<add> * both methods are objects of a swingworker class that does the work on another
<add> * thread so the main GUI doesnt hang or get stuck. this class also has methods
<add> * to get live progress and to cancel the operation
<add> */
<ide> sub1 = new TrimmerMinMax(files, outputDir, prgLbl);
<add> //execute calls the doInBackground in the other class.
<ide> sub1.execute();
<add> //this listener looks at updates generated by the process method in the other class
<ide> sub1.addPropertyChangeListener(new PropertyChangeListener()
<ide> {
<ide> @Override
<ide> e1.printStackTrace();
<ide> }
<ide> }
<add> //basically does the same but with the other method
<ide> else if (chckbxmntmDynamic.isSelected())
<ide> {
<ide> try {
<ide> JSeparator separator_1 = new JSeparator();
<ide> separator_1.setBounds(45, 215, 200, 14);
<ide> frame.getContentPane().add(separator_1);
<del>
<add>
<add> //this label is passed to the other classes to change the text while the file is being processed
<ide> prgLbl = new JLabel("...");
<ide> prgLbl.setHorizontalAlignment(SwingConstants.CENTER);
<ide> prgLbl.setFont(new Font("Tahoma", Font.PLAIN, 11));
<ide> prgLbl.setText("...");
<ide> stopButton.setVisible(false);
<ide> files = null;
<add> //closeFile cancels the operation if it is still in progress
<ide> if (chckbxmntmMaximal.isSelected())
<ide> {
<ide> sub1.closeFile();
<ide> }
<ide>
<ide> });
<add> //not sure if this is used
<ide> stopButton.setBounds(103, 381, 89, 23);
<ide> stopButton.setVisible(false);
<ide> frame.getContentPane().add(stopButton);
<ide> chckbxmntmDynamic = new JCheckBoxMenuItem("Dynamic");
<ide> mnNewMenu.add(chckbxmntmDynamic);
<ide>
<add> //number of files doesn't actually need to be specified
<add> //TODO: remove this or change it to another function
<ide> JLabel lblFiles = new JLabel("# of Files");
<ide> menuBar.add(lblFiles);
<ide>
<ide> spinner2 = new JSpinner(model2);
<ide> menuBar.add(spinner2);
<ide>
<del>
<add> //set the checkboxes to act as radio buttons
<ide> chckbxmntmDynamic.addActionListener(new ActionListener(){
<ide> public void actionPerformed(ActionEvent ae){
<ide> if (chckbxmntmMaximal.isSelected()) |
|
Java | apache-2.0 | b5e3eb98dd0f0976e48bd2690b01e0f7b08ec177 | 0 | brandt/GridSphere,brandt/GridSphere | /*
* @author <a href="mailto:[email protected]">Oliver Wehrens</a>
* @version $Id: Page.java 4496 2006-02-08 20:27:04Z wehrens $
*/
package org.gridsphere.layout.view.brush;
import org.gridsphere.layout.PortletComponent;
import org.gridsphere.layout.PortletPage;
import org.gridsphere.layout.view.BaseRender;
import org.gridsphere.layout.view.Render;
import org.gridsphere.portlet.impl.SportletProperties;
import org.gridsphere.portletcontainer.GridSphereEvent;
import javax.portlet.PortletRequest;
import java.awt.*;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
public class Page extends BaseRender implements Render {
/**
* Constructs an instance of PortletPage
*/
public Page() {
}
public StringBuffer doStart(GridSphereEvent event, PortletComponent component) {
PortletRequest req = event.getRenderRequest();
StringBuffer page = new StringBuffer();
PortletPage portletPage = (PortletPage) component;
// page header
Locale locale = req.getLocale();
ComponentOrientation orientation = ComponentOrientation.getOrientation(locale);
// page.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
page.append("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" ");
page.append("\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">");
if (orientation.isLeftToRight()) {
page.append("\n<html");
} else {
page.append("\n<html dir=\"rtl\"");
}
page.append(" xmlns=\"http://www.w3.org/1999/xhtml\">");
page.append("\n<!-- GridSphere Release: ").append(SportletProperties.getInstance().getProperty("gridsphere.release")).append("-->");
page.append("\n\t<head>");
page.append("\n\t<title>").append(portletPage.getTitle()).append("</title>");
page.append("\n\t<meta name='keywords' content='").append(portletPage.getKeywords()).append("' />");
page.append("\n\t<meta http-equiv=\"Pragma\" content=\"no-cache\" />");
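        // Expires = -1 marks the page as already expired so browsers and proxies do not cache it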
page.append("\n\t<meta http-equiv=\"Expires\" content=\"-1\"/>");
if (portletPage.getRefresh() > 0) page.append("\n\t<meta http-equiv=\"refresh\" content=\"").append(portletPage.getRefresh()).append("\"/>");
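        // the stylesheet path is built from the render kit and the theme stored in the portlet session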
String theme = (String)req.getPortletSession().getAttribute(SportletProperties.LAYOUT_THEME);
page.append("\n\t<link type=\"text/css\" href=\"").append(req.getContextPath()).append("/themes/").append(portletPage.getRenderKit()).append("/").append(theme).append("/css" + "/default.css\" rel=\"stylesheet\"/>");
// Add portlet defined stylesheet if defined
Map props = (Map) req.getAttribute(SportletProperties.PORTAL_PROPERTIES);
if (props != null) {
Object cssHrefObj = props.get("CSS_HREF");
if ((cssHrefObj != null) && (cssHrefObj instanceof List)) {
List cssHref = (List) cssHrefObj;
Iterator it = cssHref.iterator();
while (it.hasNext()) {
String cssLink = (String) it.next();
if (cssLink != null) {
page.append("\n\t<link type=\"text/css\" href=\"").append(cssLink).append("\" rel=\"stylesheet\"/>");
}
}
}
}
page.append("\n\t<link rel=\"icon\" href=\"").append(req.getContextPath()).append("/").append(portletPage.getIcon()).append("\" type=\"image/x-icon\"/>");
page.append("\n\t<link rel=\"shortcut icon\" href=\"").append(req.getContextPath()).append("/").append(portletPage.getIcon()).append("\" type=\"image/x-icon\"/>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/gridsphere.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/validation.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/yahoo/yahoo.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/yahoo/connection.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/gridsphere_ajax.js\"></script>");
if (props != null) {
Object jsObj = props.get("JAVASCRIPT_SRC");
if ((jsObj != null) && (jsObj instanceof java.util.List)) {
java.util.List jsSrc = (java.util.List) jsObj;
Iterator it = jsSrc.iterator();
while (it.hasNext()) {
String jsLink = (String) it.next();
if (jsLink != null) {
page.append("\n\t<script type=\"text/javascript\" src=\"").append(jsLink).append("\"></script>");
}
}
}
}
page.append("\n\t</head>\n\t");
page.append("<body");
if (props != null) {
Object bodyOnLoadObj = props.get("BODY_ONLOAD");
if ((bodyOnLoadObj != null) && (bodyOnLoadObj instanceof java.util.List)) {
java.util.List onLoad = (java.util.List) bodyOnLoadObj;
Iterator it = onLoad.iterator();
page.append(" onload=");
while (it.hasNext()) {
String onLoadFunc = (String) it.next();
if (onLoadFunc != null) {
page.append(onLoadFunc);
}
}
}
}
page.append(">\n<div id=\"gridsphere-layout-page\">\n");
return page;
}
public StringBuffer doEnd(GridSphereEvent event, PortletComponent comp) {
StringBuffer end = new StringBuffer("\n</div> <!-- gridsphere-layout-page -->\n");
/*
StringBuffer pagebuffer = (StringBuffer)event.getRenderRequest().getAttribute(SportletProperties.PAGE_BUFFER);
if (pagebuffer != null) {
end.append(pagebuffer);
}
*/
end.append("</body>\n</html>\n");
return end;
}
}
| src/org/gridsphere/layout/view/brush/Page.java | /*
* @author <a href="mailto:[email protected]">Oliver Wehrens</a>
* @version $Id: Page.java 4496 2006-02-08 20:27:04Z wehrens $
*/
package org.gridsphere.layout.view.brush;
import org.gridsphere.layout.PortletComponent;
import org.gridsphere.layout.PortletPage;
import org.gridsphere.layout.view.BaseRender;
import org.gridsphere.layout.view.Render;
import org.gridsphere.portlet.impl.SportletProperties;
import org.gridsphere.portletcontainer.GridSphereEvent;
import javax.portlet.PortletRequest;
import java.awt.*;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
public class Page extends BaseRender implements Render {
/**
* Constructs an instance of PortletPage
*/
public Page() {
}
public StringBuffer doStart(GridSphereEvent event, PortletComponent component) {
PortletRequest req = event.getRenderRequest();
StringBuffer page = new StringBuffer();
PortletPage portletPage = (PortletPage) component;
// page header
Locale locale = req.getLocale();
ComponentOrientation orientation = ComponentOrientation.getOrientation(locale);
// page.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
page.append("<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" ");
page.append("\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">");
if (orientation.isLeftToRight()) {
page.append("\n<html");
} else {
page.append("\n<html dir=\"rtl\"");
}
page.append(" xmlns=\"http://www.w3.org/1999/xhtml\">");
page.append("\n<!-- GridSphere Release: " + SportletProperties.getInstance().getProperty("gridsphere.release") + "-->");
page.append("\n\t<head>");
page.append("\n\t<title>").append(portletPage.getTitle()).append("</title>");
page.append("\n\t<meta name='keywords' content='").append(portletPage.getKeywords()).append("' />");
page.append("\n\t<meta http-equiv=\"Pragma\" content=\"no-cache\" />");
if (portletPage.getRefresh() > 0)
page.append("\n\t<meta http-equiv=\"refresh\" content=\"").append(portletPage.getRefresh()).append("\"/>");
String theme = (String)req.getPortletSession().getAttribute(SportletProperties.LAYOUT_THEME);
page.append("\n\t<link type=\"text/css\" href=\"").append(req.getContextPath()).append("/themes/").append(portletPage.getRenderKit()).append("/").append(theme).append("/css" + "/default.css\" rel=\"stylesheet\"/>");
// Add portlet defined stylesheet if defined
Map props = (Map) req.getAttribute(SportletProperties.PORTAL_PROPERTIES);
if (props != null) {
Object cssHrefObj = props.get("CSS_HREF");
if ((cssHrefObj != null) && (cssHrefObj instanceof List)) {
List cssHref = (List) cssHrefObj;
Iterator it = cssHref.iterator();
while (it.hasNext()) {
String cssLink = (String) it.next();
if (cssLink != null) {
page.append("\n\t<link type=\"text/css\" href=\"").append(cssLink).append("\" rel=\"stylesheet\"/>");
}
}
}
}
page.append("\n\t<link rel=\"icon\" href=\"").append(req.getContextPath()).append("/").append(portletPage.getIcon()).append("\" type=\"image/x-icon\"/>");
page.append("\n\t<link rel=\"shortcut icon\" href=\"").append(req.getContextPath()).append("/").append(portletPage.getIcon()).append("\" type=\"image/x-icon\"/>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/gridsphere.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/validation.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/yahoo/yahoo.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/yahoo/connection.js\"></script>");
page.append("\n\t<script type=\"text/javascript\" src=\"").append(req.getContextPath()).append("/javascript/gridsphere_ajax.js\"></script>");
if (props != null) {
Object jsObj = props.get("JAVASCRIPT_SRC");
if ((jsObj != null) && (jsObj instanceof java.util.List)) {
java.util.List jsSrc = (java.util.List) jsObj;
Iterator it = jsSrc.iterator();
while (it.hasNext()) {
String jsLink = (String) it.next();
if (jsLink != null) {
page.append("\n\t<script type=\"text/javascript\" src=\"").append(jsLink).append("\"></script>");
}
}
}
}
page.append("\n\t</head>\n\t");
page.append("<body");
if (props != null) {
Object bodyOnLoadObj = props.get("BODY_ONLOAD");
if ((bodyOnLoadObj != null) && (bodyOnLoadObj instanceof java.util.List)) {
java.util.List onLoad = (java.util.List) bodyOnLoadObj;
Iterator it = onLoad.iterator();
page.append(" onload=");
while (it.hasNext()) {
String onLoadFunc = (String) it.next();
if (onLoadFunc != null) {
page.append(onLoadFunc);
}
}
}
}
page.append(">\n<div id=\"gridsphere-layout-page\">\n");
return page;
}
public StringBuffer doEnd(GridSphereEvent event, PortletComponent comp) {
StringBuffer end = new StringBuffer("\n</div> <!-- gridsphere-layout-page -->\n");
/*
StringBuffer pagebuffer = (StringBuffer)event.getRenderRequest().getAttribute(SportletProperties.PAGE_BUFFER);
if (pagebuffer != null) {
end.append(pagebuffer);
}
*/
end.append("</body>\n</html>\n");
return end;
}
}
| add expiration to make sure its not caching
git-svn-id: 616481d960d639df1c769687dde8737486ca2a9a@5719 9c99c85f-4d0c-0410-8460-a9a1c48a3a7f
| src/org/gridsphere/layout/view/brush/Page.java | add expiration to make sure its not caching | <ide><path>rc/org/gridsphere/layout/view/brush/Page.java
<ide> page.append("\n<html dir=\"rtl\"");
<ide> }
<ide> page.append(" xmlns=\"http://www.w3.org/1999/xhtml\">");
<del> page.append("\n<!-- GridSphere Release: " + SportletProperties.getInstance().getProperty("gridsphere.release") + "-->");
<add> page.append("\n<!-- GridSphere Release: ").append(SportletProperties.getInstance().getProperty("gridsphere.release")).append("-->");
<ide> page.append("\n\t<head>");
<ide> page.append("\n\t<title>").append(portletPage.getTitle()).append("</title>");
<ide>
<ide> page.append("\n\t<meta name='keywords' content='").append(portletPage.getKeywords()).append("' />");
<ide> page.append("\n\t<meta http-equiv=\"Pragma\" content=\"no-cache\" />");
<del> if (portletPage.getRefresh() > 0)
<del> page.append("\n\t<meta http-equiv=\"refresh\" content=\"").append(portletPage.getRefresh()).append("\"/>");
<add> page.append("\n\t<meta http-equiv=\"Expires\" content=\"-1\"/>");
<add>
<add> if (portletPage.getRefresh() > 0) page.append("\n\t<meta http-equiv=\"refresh\" content=\"").append(portletPage.getRefresh()).append("\"/>");
<ide> String theme = (String)req.getPortletSession().getAttribute(SportletProperties.LAYOUT_THEME);
<ide> page.append("\n\t<link type=\"text/css\" href=\"").append(req.getContextPath()).append("/themes/").append(portletPage.getRenderKit()).append("/").append(theme).append("/css" + "/default.css\" rel=\"stylesheet\"/>");
<ide> |
|
Java | lgpl-2.1 | 898106ae7217996836cd5349e44b93b0e3bfb7b9 | 0 | jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,ACS-Community/ACS,ACS-Community/ACS,csrg-utfsm/acscb,jbarriosc/ACSUFRO,csrg-utfsm/acscb,csrg-utfsm/acscb,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,ACS-Community/ACS,csrg-utfsm/acscb,ACS-Community/ACS,jbarriosc/ACSUFRO,csrg-utfsm/acscb,csrg-utfsm/acscb,ACS-Community/ACS,jbarriosc/ACSUFRO,ACS-Community/ACS,csrg-utfsm/acscb,csrg-utfsm/acscb,ACS-Community/ACS,ACS-Community/ACS,jbarriosc/ACSUFRO | /*******************************************************************************
* ALMA - Atacama Large Millimeter Array
* Copyright (c) ESO - European Southern Observatory, 2011
* (in the framework of the ALMA collaboration).
* All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*******************************************************************************/
package acs.benchmark.util;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import si.ijs.maci.ComponentSpec;
import alma.ACS.ACSComponentOperations;
import alma.JavaContainerError.wrappers.AcsJContainerServicesEx;
import alma.acs.component.ComponentLifecycle;
import alma.acs.component.ComponentLifecycleException;
import alma.acs.container.ContainerServices;
import alma.acs.container.ContainerServices.ComponentReleaseCallback;
import alma.acs.container.ContainerServices.ComponentReleaseCallbackWithLogging;
import alma.acs.logging.AcsLogLevel;
/**
* A reflection-based convenience class that retrieves components either from the container services
* or from its own component cache.
* <p>
* Features:
* <ul>
* <li>It restricts the interface to the IDL-derived "xxxOperations" class in order to allow
* unit testing with mock components or actual component impl classes, without requiring container/corba support.
* <li>Hides the Corba helper class that performs the "narrow". Together with the above point, corba is then completely hidden.
* <li>(TODO) It optionally installs a transparent client-side interceptor that detects runtime problems.
* <li>(TODO) Configurable timeout disconnect from components, with transparent reconnect.
* </ul>
* @TODO: To identify containers/hosts, use collocated dummy target components instead of the deprecated
* {@link ContainerServices#getDynamicComponent(ComponentSpec, boolean)}.
* <p>
* This class is thread-safe, so that a client can activate or release several components at once.
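 * <p>
 * Typical usage (sketch only; the names below are placeholders, not part of this class):
 * <pre>
 *   ComponentAccessUtil util = new ComponentAccessUtil(containerServices);
 *   MyCompOperations comp = util.getDynamicComponent(spec, MyCompOperations.class);
 *   // ... use the component ...
 *   util.releaseAllComponents(true);
 * </pre>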
*/
public class ComponentAccessUtil {
protected final Logger logger;
protected final ContainerServices contSrv;
/**
* Component reference plus synchronization support for other threads
* that request the same component reference but must wait until the shared component
* has been activated.
*/
protected static class CompRefHelper {
ACSComponentOperations compRef;
final CountDownLatch activationSync;
boolean isReleasing;
CompRefHelper() {
this.activationSync = new CountDownLatch(1);
isReleasing = false;
}
/**
* Should be called by the first thread that requested the component,
* to unblock the other threads.
*/
void setCompRef(ACSComponentOperations compRef) {
this.compRef = compRef;
activationSync.countDown();
}
boolean awaitCompActivation(long timeout, TimeUnit unit) throws InterruptedException {
return activationSync.await(timeout, unit);
}
}
/**
* key = component instance name,
* value = component reference + sync support for other threads that request the same component.
* <p>
* This map must be protected from concurrent access.
*/
protected final Map<String, CompRefHelper> compName2Comp;
public ComponentAccessUtil(ContainerServices contSrv) {
this.contSrv = contSrv;
this.logger = contSrv.getLogger();
this.compName2Comp = new HashMap<String, CompRefHelper>();
}
/**
* Returns the cached reference or the component retrieved from the container services
* @param <T> The IDL-derived component interface
* @param compSpec
* @param idlOpInterface
* @return
* @throws AcsJContainerServicesEx
*/
public <T extends ACSComponentOperations> T getDynamicComponent(ComponentSpec compSpec, Class<T> idlOpInterface)
throws AcsJContainerServicesEx {
T comp = null;
boolean foundInCache = false;
boolean mustActivateComp = false;
CompRefHelper compRefHelper = null;
synchronized (compName2Comp) {
// try the cache first
compRefHelper = compName2Comp.get(compSpec.component_name);
if (compRefHelper == null) {
mustActivateComp = true;
compRefHelper = new CompRefHelper();
compName2Comp.put(compSpec.component_name, compRefHelper);
}
else {
if (compRefHelper.compRef != null) {
if (compRefHelper.isReleasing) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Component '" + compSpec.component_name + "' is being released and thus cannot be activated.");
throw ex;
}
foundInCache = true;
comp = idlOpInterface.cast(compRefHelper.compRef);
}
}
} // must keep this synchronized block short, to not prevent parallel activation of different components!
try {
if (!foundInCache) {
if (mustActivateComp) {
comp = getDynamicComponentFromContainerServices(compSpec, idlOpInterface);
compRefHelper.setCompRef(comp); // this will free other threads waiting for the same comp.
}
else {
// wait if another thread is already activating this comp
boolean waitOK = false;
try {
waitOK = compRefHelper.awaitCompActivation(5, TimeUnit.MINUTES);
} catch (InterruptedException ex) {
// just leave waitOK = false
}
if (!waitOK) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Timed out or got interrupted while waiting for activation of component '" +
compSpec.component_name + "'.");
throw ex;
}
}
}
return comp;
} finally {
logger.log(AcsLogLevel.DEBUG, "Retrieved component '" + compSpec.component_name + "' from "
+ (foundInCache ? "cache." : "container services.") );
}
}
/**
* Gets the component via {@link #contSrv}, without using the cache.
* Can be overridden for tests, to bypass the container services.
* @throws AcsJContainerServicesEx
*/
protected <T extends ACSComponentOperations> T getDynamicComponentFromContainerServices(ComponentSpec compSpec, Class<T> idlOpInterface)
throws AcsJContainerServicesEx {
// infer the corba helper class
Class<?> corbaHelperClass = null;
try {
int classBaseNameEnd = idlOpInterface.getName().lastIndexOf("Operations");
String classBaseName = idlOpInterface.getName().substring(0, classBaseNameEnd);
corbaHelperClass = Class.forName(classBaseName + "Helper");
} catch (Exception ex) {
String msg = "Failed to find Corba Helper class matching " + idlOpInterface.getName();
logger.log(Level.FINE, msg, ex);
throw new IllegalArgumentException(msg, ex); // TODO throw better ex
}
org.omg.CORBA.Object compRaw = contSrv.getDynamicComponent(compSpec, false);
// narrow the component reference
Object comp = null;
try {
Method narrowMethod = corbaHelperClass.getMethod("narrow", org.omg.CORBA.Object.class);
comp = narrowMethod.invoke(null, compRaw);
} catch (Exception ex) {
String msg = "Failed to Corba-narrow component " + compSpec.component_name;
logger.log(Level.FINE, msg, ex);
throw new IllegalArgumentException(msg, ex); // TODO throw better ex
}
if (!idlOpInterface.isInstance(comp)) {
String msg = "Narrowed component " + compSpec.component_name + " is not of type " + idlOpInterface;
logger.log(Level.FINE, msg);
throw new IllegalArgumentException(msg); // TODO throw better ex
}
return idlOpInterface.cast(comp);
}
public List<ACSComponentOperations> getCachedComponents() {
List<ACSComponentOperations> ret = new ArrayList<ACSComponentOperations>();
synchronized (compName2Comp) {
for (CompRefHelper compRefHelper : compName2Comp.values()) {
ret.add(compRefHelper.compRef);
}
}
return ret;
}
/**
* Should be called when a component is no longer needed.
*
* @param waitForCompRelease If <code>true</code> this method waits for the complete component release
* otherwise returns immediately
*
*/
public void releaseComponent(String compName, boolean waitForCompRelease) {
// Mark this comp in the cache
synchronized (compName2Comp) {
CompRefHelper compRefHelper = compName2Comp.get(compName);
if (compRefHelper != null) {
compRefHelper.isReleasing = true;
}
}
ComponentReleaseCallback callback = new ComponentReleaseCallbackWithLogging(logger, AcsLogLevel.DEBUG);
contSrv.releaseComponent(compName, callback);
try {
if (waitForCompRelease) {
boolean waitOK = callback.awaitComponentRelease(60, TimeUnit.SECONDS);
if (!waitOK) {
logger.warning("Timed out (60s) waiting for release of component " + compName);
}
}
} catch (InterruptedException ex) {
logger.log(AcsLogLevel.DEBUG, "Interrupted while waiting for release of component " + compName);
}
// remove comp reference from the cache
synchronized (compName2Comp) {
compName2Comp.remove(compName);
}
}
/**
	 * Should be called when no components are needed any more.
* @TODO: Offer also "waitForCompRelease" flag. Probably best to
* move support for both parallel component activation and release
* to another class such as "ConcurrentComponentAccessUtil"
* which maintains a thread pool and can
* - take a list of component names and return list of activated components
* (check if this gets more confusing than helpful!)
* - release several components in parallel
* - perhaps also take explicit list of components to be released "releaseComponents(list)"
*
* @param waitForCompsRelease If <code>true</code> this method waits for the complete components release
* otherwise returns immediately
*/
public void releaseAllComponents(boolean waitForCompsRelease) {
List<String> compNames;
synchronized (compName2Comp) {
compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
}
releaseComponents(compNames, waitForCompsRelease);
}
/**
* Release a set of components.
*
* @param componentNames The name of the components to release
* @param waitForCompsRelease If <code>true</code> this method waits for the complete components release
* otherwise returns immediately
*/
public void releaseComponents(Collection<String> componentNames, boolean waitForCompsRelease) {
if (componentNames==null || componentNames.isEmpty()) {
// Nothing to do
return;
}
for (String compName : componentNames) {
// @TODO if waitForCompRelease, still we should not call releaseComponent(compName, false)
			// because this releases components only sequentially, while they should be released in parallel
releaseComponent(compName, waitForCompsRelease);
}
}
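	/**
	 * Logs the names of all components currently held in the cache.
	 */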
public void logInfo()
{
int nElem = compName2Comp.size();
String message = "Components in the list: " + nElem + " (";
List<String> compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
for (String compName : compNames) {
message = message + compName + " ";
}
logger.info(message + ")");
}
/**
* Convenience method for subclasses created by unit tests,
* that override {@link #getComponentFromContainerServices(String, Class)}
* and need to create and initialize component implementation classes locally inside the test.
*/
protected <T, U extends ACSComponentOperations & ComponentLifecycle>
T initCompImpl(U compImpl, Class<T> idlOpInterface)
throws ComponentLifecycleException {
compImpl.initialize(contSrv);
compImpl.execute();
return idlOpInterface.cast(compImpl);
}
}
| Benchmark/util/src/acs/benchmark/util/ComponentAccessUtil.java | /*******************************************************************************
* ALMA - Atacama Large Millimeter Array
* Copyright (c) ESO - European Southern Observatory, 2011
* (in the framework of the ALMA collaboration).
* All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*******************************************************************************/
package acs.benchmark.util;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import si.ijs.maci.ComponentSpec;
import alma.ACS.ACSComponentOperations;
import alma.JavaContainerError.wrappers.AcsJContainerServicesEx;
import alma.acs.component.ComponentLifecycle;
import alma.acs.component.ComponentLifecycleException;
import alma.acs.container.ContainerServices;
import alma.acs.container.ContainerServices.ComponentReleaseCallback;
import alma.acs.container.ContainerServices.ComponentReleaseCallbackWithLogging;
import alma.acs.logging.AcsLogLevel;
/**
* A reflection-based convenience class that retrieves components either from the container services
* or from its own component cache.
* <p>
* Features:
* <ul>
* <li>It restricts the interface to the IDL-derived "xxxOperations" class in order to allow
* unit testing with mock components or actual component impl classes, without requiring container/corba support.
* <li>Hides the Corba helper class that performs the "narrow". Together with the above point, corba is then completely hidden.
* <li>(TODO) It optionally installs a transparent client-side interceptor that detects runtime problems.
* <li>(TODO) Configurable timeout disconnect from components, with transparent reconnect.
* </ul>
* @TODO: To identify containers/hosts, use collocated dummy target components instead of the deprecated
* {@link ContainerServices#getDynamicComponent(ComponentSpec, boolean)}.
* <p>
* This class is thread-safe, so that a client can activate or release several components at once.
*/
public class ComponentAccessUtil {
protected final Logger logger;
protected final ContainerServices contSrv;
/**
* Component reference plus synchronization support for other threads
* that request the same component reference but must wait until the shared component
* has been activated.
*/
protected static class CompRefHelper {
ACSComponentOperations compRef;
final CountDownLatch activationSync;
boolean isReleasing;
CompRefHelper() {
this.activationSync = new CountDownLatch(1);
isReleasing = false;
}
/**
* Should be called by the first thread that requested the component,
* to unblock the other threads.
*/
void setCompRef(ACSComponentOperations compRef) {
this.compRef = compRef;
activationSync.countDown();
}
boolean awaitCompActivation(long timeout, TimeUnit unit) throws InterruptedException {
return activationSync.await(timeout, unit);
}
}
/**
* key = component instance name,
* value = component reference + sync support for other threads that request the same component.
* <p>
* This map must be protected from concurrent access.
*/
protected final Map<String, CompRefHelper> compName2Comp;
public ComponentAccessUtil(ContainerServices contSrv) {
this.contSrv = contSrv;
this.logger = contSrv.getLogger();
this.compName2Comp = new HashMap<String, CompRefHelper>();
}
/**
* Returns the cached reference or the component retrieved from the container services
* @param <T> The IDL-derived component interface
* @param compSpec
* @param idlOpInterface
* @return
* @throws AcsJContainerServicesEx
*/
public <T extends ACSComponentOperations> T getDynamicComponent(ComponentSpec compSpec, Class<T> idlOpInterface)
throws AcsJContainerServicesEx {
T comp = null;
boolean foundInCache = false;
boolean mustActivateComp = false;
CompRefHelper compRefHelper = null;
synchronized (compName2Comp) {
// try the cache first
compRefHelper = compName2Comp.get(compSpec.component_name);
if (compRefHelper == null) {
mustActivateComp = true;
compRefHelper = new CompRefHelper();
compName2Comp.put(compSpec.component_name, compRefHelper);
}
else {
if (compRefHelper.compRef != null) {
if (compRefHelper.isReleasing) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Component '" + compSpec.component_name + "' is being released and thus cannot be activated.");
throw ex;
}
foundInCache = true;
comp = idlOpInterface.cast(compRefHelper.compRef);
}
}
} // must keep this synchronized block short, to not prevent parallel activation of different components!
try {
if (!foundInCache) {
if (mustActivateComp) {
comp = getDynamicComponentFromContainerServices(compSpec, idlOpInterface);
compRefHelper.setCompRef(comp); // this will free other threads waiting for the same comp.
}
else {
// wait if another thread is already activating this comp
boolean waitOK = false;
try {
waitOK = compRefHelper.awaitCompActivation(5, TimeUnit.MINUTES);
} catch (InterruptedException ex) {
// just leave waitOK = false
}
if (!waitOK) {
AcsJContainerServicesEx ex = new AcsJContainerServicesEx();
ex.setContextInfo("Timed out or got interrupted while waiting for activation of component '" +
compSpec.component_name + "'.");
throw ex;
}
}
}
return comp;
} finally {
logger.log(AcsLogLevel.DEBUG, "Retrieved component '" + compSpec.component_name + "' from "
+ (foundInCache ? "cache." : "container services.") );
}
}
/**
* Gets the component via {@link #contSrv}, without using the cache.
* Can be overridden for tests, to bypass the container services.
* @throws AcsJContainerServicesEx
*/
protected <T extends ACSComponentOperations> T getDynamicComponentFromContainerServices(ComponentSpec compSpec, Class<T> idlOpInterface)
throws AcsJContainerServicesEx {
// infer the corba helper class
Class<?> corbaHelperClass = null;
try {
int classBaseNameEnd = idlOpInterface.getName().lastIndexOf("Operations");
String classBaseName = idlOpInterface.getName().substring(0, classBaseNameEnd);
corbaHelperClass = Class.forName(classBaseName + "Helper");
} catch (Exception ex) {
String msg = "Failed to find Corba Helper class matching " + idlOpInterface.getName();
logger.log(Level.FINE, msg, ex);
throw new IllegalArgumentException(msg, ex); // TODO throw better ex
}
org.omg.CORBA.Object compRaw = contSrv.getDynamicComponent(compSpec, false);
// narrow the component reference
Object comp = null;
try {
Method narrowMethod = corbaHelperClass.getMethod("narrow", org.omg.CORBA.Object.class);
comp = narrowMethod.invoke(null, compRaw);
} catch (Exception ex) {
String msg = "Failed to Corba-narrow component " + compSpec.component_name;
logger.log(Level.FINE, msg, ex);
throw new IllegalArgumentException(msg, ex); // TODO throw better ex
}
if (!idlOpInterface.isInstance(comp)) {
String msg = "Narrowed component " + compSpec.component_name + " is not of type " + idlOpInterface;
logger.log(Level.FINE, msg);
throw new IllegalArgumentException(msg); // TODO throw better ex
}
return idlOpInterface.cast(comp);
}
public List<ACSComponentOperations> getCachedComponents() {
List<ACSComponentOperations> ret = new ArrayList<ACSComponentOperations>();
synchronized (compName2Comp) {
for (CompRefHelper compRefHelper : compName2Comp.values()) {
ret.add(compRefHelper.compRef);
}
}
return ret;
}
/**
* Should be called when a component is no longer needed.
* <p>
* This call waits for the complete component release to finish and only then returns.
*
*/
public void releaseComponent(String compName, boolean waitForCompRelease) {
// Mark this comp in the cache
synchronized (compName2Comp) {
CompRefHelper compRefHelper = compName2Comp.get(compName);
if (compRefHelper != null) {
compRefHelper.isReleasing = true;
}
}
ComponentReleaseCallback callback = new ComponentReleaseCallbackWithLogging(logger, AcsLogLevel.DEBUG);
contSrv.releaseComponent(compName, callback);
try {
if (waitForCompRelease) {
boolean waitOK = callback.awaitComponentRelease(60, TimeUnit.SECONDS);
if (!waitOK) {
logger.warning("Timed out (60s) waiting for release of component " + compName);
}
}
} catch (InterruptedException ex) {
logger.log(AcsLogLevel.DEBUG, "Interrupted while waiting for release of component " + compName);
}
// remove comp reference from the cache
synchronized (compName2Comp) {
compName2Comp.remove(compName);
}
}
/**
* Should be called when no component are needed any more.
* @TODO: Offer also "waitForCompRelease" flag. Probably best to
* move support for both parallel component activation and release
* to another class such as "ConcurrentComponentAccessUtil"
* which maintains a thread pool and can
* - take a list of component names and return list of activated components
* (check if this gets more confusing than helpful!)
* - release several components in parallel
* - perhaps also take explicit list of components to be released "releaseComponents(list)"
*/
public void releaseAllComponents() {
synchronized (compName2Comp) {
List<String> compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
for (String compName : compNames) {
// @TODO if waitForCompRelease, still we should not call releaseComponent(compName, false)
// because this releases components only sequentially, while they should be release in parallel
releaseComponent(compName, false);
}
}
}
public void logInfo()
{
int nElem = compName2Comp.size();
String message = "Components in the list: " + nElem + " (";
List<String> compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
for (String compName : compNames) {
message = message + compName + " ";
}
logger.info(message + ")");
}
/**
* Convenience method for subclasses created by unit tests,
* that override {@link #getComponentFromContainerServices(String, Class)}
* and need to create and initialize component implementation classes locally inside the test.
*/
protected <T, U extends ACSComponentOperations & ComponentLifecycle>
T initCompImpl(U compImpl, Class<T> idlOpInterface)
throws ComponentLifecycleException {
compImpl.initialize(contSrv);
compImpl.execute();
return idlOpInterface.cast(compImpl);
}
}
| Added a module to release a list of components.
Shorten the critical section into releaseAllComponents()
git-svn-id: afcf11d89342f630bd950d18a70234a9e277d909@166726 523d945c-050c-4681-91ec-863ad3bb968a
| Benchmark/util/src/acs/benchmark/util/ComponentAccessUtil.java | Added a module to release a list of components. Shorten the critical section into releaseAllComponents() | <ide><path>enchmark/util/src/acs/benchmark/util/ComponentAccessUtil.java
<ide>
<ide> import java.lang.reflect.Method;
<ide> import java.util.ArrayList;
<add>import java.util.Collection;
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide>
<ide> /**
<ide> * Should be called when a component is no longer needed.
<del> * <p>
<del> * This call waits for the complete component release to finish and only then returns.
<add> *
<add> * @param waitForCompRelease If <code>true</code> this method waits for the complete component release
<add> * otherwise returns immediately
<ide> *
<ide> */
<ide> public void releaseComponent(String compName, boolean waitForCompRelease) {
<ide> * (check if this gets more confusing than helpful!)
<ide> * - release several components in parallel
<ide> * - perhaps also take explicit list of components to be released "releaseComponents(list)"
<del> */
<del> public void releaseAllComponents() {
<del> synchronized (compName2Comp) {
<del> List<String> compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
<del> for (String compName : compNames) {
<del> // @TODO if waitForCompRelease, still we should not call releaseComponent(compName, false)
<del> // because this releases components only sequentially, while they should be release in parallel
<del> releaseComponent(compName, false);
<del> }
<add> *
<add> * @param waitForCompsRelease If <code>true</code> this method waits for the complete components release
<add> * otherwise returns immediately
<add> */
<add> public void releaseAllComponents(boolean waitForCompsRelease) {
<add> List<String> compNames;
<add> synchronized (compName2Comp) {
<add> compNames = new ArrayList<String>(compName2Comp.keySet()); // to avoid ConcurrentModificationException
<add> }
<add> releaseComponents(compNames, waitForCompsRelease);
<add> }
<add>
<add> /**
<add> * Release a set of components.
<add> *
<add> * @param componentNames The name of the components to release
<add> * @param waitForCompsRelease If <code>true</code> this method waits for the complete components release
<add> * otherwise returns immediately
<add> */
<add> public void releaseComponents(Collection<String> componentNames, boolean waitForCompsRelease) {
<add> if (componentNames==null || componentNames.isEmpty()) {
<add> // Nothing to do
<add> return;
<add> }
<add> for (String compName : componentNames) {
<add> // @TODO if waitForCompRelease, still we should not call releaseComponent(compName, false)
<add> // because this releases components only sequentially, while they should be release in parallel
<add> releaseComponent(compName, waitForCompsRelease);
<ide> }
<ide> }
<ide> |
|
JavaScript | mit | 89bbed544284e4497c3490e15e4443cadab26d1c | 0 | jaredhanson/node-tokens | /**
* Module dependencies.
*/
var moment = require('moment')
, jws = require('jws')
, NotValidError = require('../errors/notvaliderror');
/**
* Decode a structured access token.
*
* References:
* - [Structured Access Token for Sharing Authorization Grant between a Resource Server and an Authorization Server](http://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01)
* - [JSON Web Token (JWT)](http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-14)
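 *
 * Usage sketch (illustrative only; `lookupKey` stands in for the caller's key lookup):
 *
 *   var decode = require('./sat')({ audience: 'https://rs.example.org' }, function(issuer, done) {
 *     done(null, lookupKey(issuer));
 *   });
 *   decode(data, function(err, claims) { ... });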
*/
module.exports = function(options, keying) {
if (typeof options == 'function') {
keying = options;
options = undefined;
}
options = options || {};
if (!keying) { throw new TypeError('SAT decoding requires a keying callback'); }
var audience = options.audience || [];
if (!Array.isArray(audience)) {
audience = [ audience ];
}
return function sat(data, opts, cb) {
if (typeof opts == 'function') {
cb = opts;
opts = undefined;
}
opts = opts || {};
var aliases = audience;
if (opts.audience) {
aliases = audience.concat(opts.audience);
}
// Decode the JWT so the header and payload are available, as they contain
// fields needed to find the corresponding key. Note that at this point, the
// assertion has not actually been verified. It will be verified later, after
// the keying material has been retrieved.
var token = jws.decode(data, { json: true });
if (!token) { return cb(); }
var header = token.header
, payload = token.payload;
// Validate the assertion.
// https://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01#section-3
// https://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01#section-4
// http://tools.ietf.org/html/draft-ietf-oauth-jwt-bearer-07#section-3
if (!payload.iss) { return cb(new NotValidError('Token missing required claim: iss')); }
if (!payload.sub) { return cb(new NotValidError('Token missing required claim: sub')); }
if (!payload.aud) { return cb(new NotValidError('Token missing required claim: aud')); }
if (!payload.exp) { return cb(new NotValidError('Token missing required claim: exp')); }
var aud = payload.aud;
if (!Array.isArray(aud)) {
aud = [ aud ];
}
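    // the token is acceptable if any value of its "aud" claim matches one of the configured audience aliases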
var aok = aliases.some(function(a) { return aud.indexOf(a) != -1 });
if (!aok) {
return cb(new NotValidError('Token not intended for this audience'));
}
var now = Math.floor(Date.now() / 1000);
if (payload.exp < now) { // expired
return cb(new NotValidError('Token expired'));
}
if (payload.nbf && payload.nbf > now) { // not yet acceptable
return cb(new NotValidError('Token not yet acceptable'));
}
function keyed(err, key) {
if (err) { return cb(err); }
var ok = jws.verify(data, key);
if (!ok) {
return cb(new NotValidError('Token signature invalid'));
}
var claims = {};
claims.issuer = payload.iss;
claims.subject = payload.sub;
claims.audience = aud;
claims.expiresAt = moment.unix(payload.exp).toDate();
if (payload.azp) { claims.authorizedPresenter = payload.azp; }
if (payload.scope) {
// NOTE: "scope" is not defined as a claim by the SAT specification.
// However, it is widely needed when making authorization
        //          decisions, and is parsed here as a convenience. The parsing
// is in accordance with established industry conventions, as set
// by Google, IETF drafts, and others.
//
// References:
// - https://developers.google.com/accounts/docs/OAuth2ServiceAccount
// - http://tools.ietf.org/html/draft-richer-oauth-introspection-04
claims.scope = payload.scope.split(' ');
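        // e.g. (illustrative only): a scope claim of "openid profile" yields claims.scope = ['openid', 'profile']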
}
return cb(null, claims);
}
try {
var arity = keying.length;
if (arity == 3) {
keying(payload.iss, header, keyed);
} else { // arity == 2
keying(payload.iss, keyed);
}
} catch (ex) {
return cb(ex);
}
};
};
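// ---------------------------------------------------------------------------
// Editor's note: the sketch below is illustrative only and is not part of the
// original module. It shows one way the factory exported above could be wired
// up; the require path, audience value, shared secret and variable names are
// assumptions, not values taken from this repository.
//
// var satDecode = require('./lib/decode/sat');
//
// // keying callback in its two-argument form: (issuer, done)
// var decode = satDecode({ audience: 'https://rs.example.com' }, function(issuer, done) {
//   // look up the verification key for this issuer (a static secret is assumed here)
//   done(null, 'shared-secret');
// });
//
// decode(tokenString, function(err, claims) {
//   if (err) { return console.error('token rejected: ' + err.message); }  // e.g. NotValidError
//   if (!claims) { return console.error('not a decodable JWT'); }         // jws.decode returned nothing
//   console.log(claims.issuer, claims.subject, claims.audience, claims.scope);
// });
// ---------------------------------------------------------------------------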
| lib/decode/sat.js | /**
* Module dependencies.
*/
var moment = require('moment')
, jws = require('jws')
, NotValidError = require('../errors/notvaliderror');
/**
* Decode a structured access token.
*
* References:
* - [Structured Access Token for Sharing Authorization Grant between a Resource Server and an Authorization Server](http://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01)
* - [JSON Web Token (JWT)](http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-14)
*/
module.exports = function(options, keying) {
if (typeof options == 'function') {
keying = options;
options = undefined;
}
options = options || {};
if (!keying) { throw new TypeError('SAT decoding requires a keying callback'); }
var audience = options.audience || [];
if (!Array.isArray(audience)) {
audience = [ audience ];
}
return function sat(data, opts, cb) {
if (typeof opts == 'function') {
cb = opts;
opts = undefined;
}
opts = opts || {};
// Decode the JWT so the header and payload are available, as they contain
// fields needed to find the corresponding key. Note that at this point, the
// assertion has not actually been verified. It will be verified later, after
// the keying material has been retrieved.
var token = jws.decode(data, { json: true });
if (!token) { return cb(); }
var aliases = audience;
if (opts.audience) {
aliases = audience.concat(opts.audience);
}
var header = token.header
, payload = token.payload;
// Validate the assertion.
// https://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01#section-3
// https://tools.ietf.org/html/draft-sakimura-oidc-structured-token-01#section-4
// http://tools.ietf.org/html/draft-ietf-oauth-jwt-bearer-07#section-3
if (!payload.iss) { return cb(new NotValidError('Token missing required claim: iss')); }
if (!payload.sub) { return cb(new NotValidError('Token missing required claim: sub')); }
if (!payload.aud) { return cb(new NotValidError('Token missing required claim: aud')); }
if (!payload.exp) { return cb(new NotValidError('Token missing required claim: exp')); }
var aud = payload.aud;
if (!Array.isArray(aud)) {
aud = [ aud ];
}
var aok = aliases.some(function(a) { return aud.indexOf(a) != -1 });
if (!aok) {
return cb(new NotValidError('Token not intended for this audience'));
}
var now = Math.floor(Date.now() / 1000);
if (payload.exp < now) { // expired
return cb(new NotValidError('Token expired'));
}
if (payload.nbf && payload.nbf > now) { // not yet acceptable
return cb(new NotValidError('Token not yet acceptable'));
}
function keyed(err, key) {
if (err) { return cb(err); }
var ok = jws.verify(data, key);
if (!ok) {
return cb(new NotValidError('Token signature invalid'));
}
var claims = {};
claims.issuer = payload.iss;
claims.subject = payload.sub;
claims.audience = aud;
claims.expiresAt = moment.unix(payload.exp).toDate();
if (payload.azp) { claims.authorizedPresenter = payload.azp; }
return cb(null, claims);
}
try {
var arity = keying.length;
if (arity == 3) {
keying(payload.iss, header, keyed);
} else { // arity == 2
keying(payload.iss, keyed);
}
} catch (ex) {
return cb(ex);
}
};
};
| Parse scope in structured access token.
| lib/decode/sat.js | Parse scope in structured access token. | <ide><path>ib/decode/sat.js
<ide> }
<ide> opts = opts || {};
<ide>
<add> var aliases = audience;
<add> if (opts.audience) {
<add> aliases = audience.concat(opts.audience);
<add> }
<add>
<add>
<ide> // Decode the JWT so the header and payload are available, as they contain
<ide> // fields needed to find the corresponding key. Note that at this point, the
<ide> // assertion has not actually been verified. It will be verified later, after
<ide> // the keying material has been retrieved.
<ide> var token = jws.decode(data, { json: true });
<ide> if (!token) { return cb(); }
<del>
<del> var aliases = audience;
<del> if (opts.audience) {
<del> aliases = audience.concat(opts.audience);
<del> }
<ide>
<ide> var header = token.header
<ide> , payload = token.payload;
<ide> claims.audience = aud;
<ide> claims.expiresAt = moment.unix(payload.exp).toDate();
<ide> if (payload.azp) { claims.authorizedPresenter = payload.azp; }
<add> if (payload.scope) {
<add> // NOTE: "scope" is not defined as a claim by the SAT specification.
<add> // However, it is widely needed when making authorization
<add>       //          decisions, and is parsed here as a convenience. The parsing
<add> // is in accordance with established industry conventions, as set
<add> // by Google, IETF drafts, and others.
<add> //
<add> // References:
<add> // - https://developers.google.com/accounts/docs/OAuth2ServiceAccount
<add> // - http://tools.ietf.org/html/draft-richer-oauth-introspection-04
<add>
<add> claims.scope = payload.scope.split(' ');
<add> }
<ide>
<ide> return cb(null, claims);
<ide> } |
|
Java | apache-2.0 | error: pathspec 'main/src/com/pathtomani/entities/planet/PlanetSprites.java' did not match any file(s) known to git
| 3a8ea68c055d849ba782d89fb192668f876b76b1 | 1 | TheNightForum/Path-to-Mani,TheNightForum/Path-to-Mani,BurntGameProductions/Path-to-Mani,BurntGameProductions/Path-to-Mani,TheNightForum/Path-to-Mani,BurntGameProductions/Path-to-Mani | /*
* Copyright 2016 BurntGameProductions
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pathtomani.entities.planet;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.ContactImpulse;
import com.pathtomani.common.ManiMath;
import com.pathtomani.game.DmgType;
import com.pathtomani.game.FarObj;
import com.pathtomani.game.ManiGame;
import com.pathtomani.game.ManiObject;
import com.pathtomani.game.dra.Dra;
import java.util.List;
public class PlanetSprites implements ManiObject {
private final Planet myPlanet;
private float myRelAngleToPlanet;
private final float myDist;
private final List<Dra> myDras;
private final float myToPlanetRotSpd;
private final Vector2 myPos;
private float myAngle;
public PlanetSprites(Planet planet, float relAngleToPlanet, float dist, List<Dra> dras, float toPlanetRotSpd) {
myPlanet = planet;
myRelAngleToPlanet = relAngleToPlanet;
myDist = dist;
myDras = dras;
myToPlanetRotSpd = toPlanetRotSpd;
myPos = new Vector2();
setDependentParams();
}
@Override
public void update(ManiGame game) {
setDependentParams();
myRelAngleToPlanet += myToPlanetRotSpd * game.getTimeStep();
}
private void setDependentParams() {
float angleToPlanet = myPlanet.getAngle() + myRelAngleToPlanet;
ManiMath.fromAl(myPos, angleToPlanet, myDist, true);
myPos.add(myPlanet.getPos());
myAngle = angleToPlanet + 90;
}
@Override
public boolean shouldBeRemoved(ManiGame game) {
return false;
}
@Override
public void onRemove(ManiGame game) {
}
@Override
public void receiveDmg(float dmg, ManiGame game, Vector2 pos, DmgType dmgType) {
}
@Override
public boolean receivesGravity() {
return false;
}
@Override
public void receiveForce(Vector2 force, ManiGame game, boolean acc) {
}
@Override
public Vector2 getPosition() {
return myPos;
}
@Override
public FarObj toFarObj() {
return new FarPlanetSprites(myPlanet, myRelAngleToPlanet, myDist, myDras, myToPlanetRotSpd);
}
@Override
public List<Dra> getDras() {
return myDras;
}
@Override
public float getAngle() {
return myAngle;
}
@Override
public Vector2 getSpd() {
return null;
}
@Override
public void handleContact(ManiObject other, ContactImpulse impulse, boolean isA, float absImpulse,
ManiGame game, Vector2 collPos)
{
}
@Override
public String toDebugString() {
return null;
}
@Override
public Boolean isMetal() {
return false;
}
@Override
public boolean hasBody() {
return false;
}
}
| main/src/com/pathtomani/entities/planet/PlanetSprites.java | Moving files to Entities.
| main/src/com/pathtomani/entities/planet/PlanetSprites.java | Moving files to Entities. | <ide><path>ain/src/com/pathtomani/entities/planet/PlanetSprites.java
<add>/*
<add> * Copyright 2016 BurntGameProductions
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package com.pathtomani.entities.planet;
<add>
<add>import com.badlogic.gdx.math.Vector2;
<add>import com.badlogic.gdx.physics.box2d.ContactImpulse;
<add>import com.pathtomani.common.ManiMath;
<add>import com.pathtomani.game.DmgType;
<add>import com.pathtomani.game.FarObj;
<add>import com.pathtomani.game.ManiGame;
<add>import com.pathtomani.game.ManiObject;
<add>import com.pathtomani.game.dra.Dra;
<add>
<add>import java.util.List;
<add>
<add>public class PlanetSprites implements ManiObject {
<add>
<add> private final Planet myPlanet;
<add> private float myRelAngleToPlanet;
<add> private final float myDist;
<add> private final List<Dra> myDras;
<add> private final float myToPlanetRotSpd;
<add> private final Vector2 myPos;
<add> private float myAngle;
<add>
<add> public PlanetSprites(Planet planet, float relAngleToPlanet, float dist, List<Dra> dras, float toPlanetRotSpd) {
<add> myPlanet = planet;
<add> myRelAngleToPlanet = relAngleToPlanet;
<add> myDist = dist;
<add> myDras = dras;
<add> myToPlanetRotSpd = toPlanetRotSpd;
<add> myPos = new Vector2();
<add> setDependentParams();
<add> }
<add>
<add> @Override
<add> public void update(ManiGame game) {
<add> setDependentParams();
<add> myRelAngleToPlanet += myToPlanetRotSpd * game.getTimeStep();
<add> }
<add>
<add> private void setDependentParams() {
<add> float angleToPlanet = myPlanet.getAngle() + myRelAngleToPlanet;
<add> ManiMath.fromAl(myPos, angleToPlanet, myDist, true);
<add> myPos.add(myPlanet.getPos());
<add> myAngle = angleToPlanet + 90;
<add> }
<add>
<add> @Override
<add> public boolean shouldBeRemoved(ManiGame game) {
<add> return false;
<add> }
<add>
<add> @Override
<add> public void onRemove(ManiGame game) {
<add> }
<add>
<add> @Override
<add> public void receiveDmg(float dmg, ManiGame game, Vector2 pos, DmgType dmgType) {
<add> }
<add>
<add> @Override
<add> public boolean receivesGravity() {
<add> return false;
<add> }
<add>
<add> @Override
<add> public void receiveForce(Vector2 force, ManiGame game, boolean acc) {
<add> }
<add>
<add> @Override
<add> public Vector2 getPosition() {
<add> return myPos;
<add> }
<add>
<add> @Override
<add> public FarObj toFarObj() {
<add> return new FarPlanetSprites(myPlanet, myRelAngleToPlanet, myDist, myDras, myToPlanetRotSpd);
<add> }
<add>
<add> @Override
<add> public List<Dra> getDras() {
<add> return myDras;
<add> }
<add>
<add> @Override
<add> public float getAngle() {
<add> return myAngle;
<add> }
<add>
<add> @Override
<add> public Vector2 getSpd() {
<add> return null;
<add> }
<add>
<add> @Override
<add> public void handleContact(ManiObject other, ContactImpulse impulse, boolean isA, float absImpulse,
<add> ManiGame game, Vector2 collPos)
<add> {
<add> }
<add>
<add> @Override
<add> public String toDebugString() {
<add> return null;
<add> }
<add>
<add> @Override
<add> public Boolean isMetal() {
<add> return false;
<add> }
<add>
<add> @Override
<add> public boolean hasBody() {
<add> return false;
<add> }
<add>
<add>} |
|
Java | apache-2.0 | d0380d90070f98928f3d54144ab005dedd3d989a | 0 | hawkular/hawkular-agent,hawkular/hawkular-agent,hawkular/hawkular-agent | /*
* Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.service;
import java.io.File;
import java.io.InputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.hawkular.agent.monitor.api.HawkularAgentContext;
import org.hawkular.agent.monitor.api.HawkularAgentContextImpl;
import org.hawkular.agent.monitor.cmd.Command;
import org.hawkular.agent.monitor.cmd.FeedCommProcessor;
import org.hawkular.agent.monitor.cmd.WebSocketClientBuilder;
import org.hawkular.agent.monitor.config.AgentCoreEngineConfiguration;
import org.hawkular.agent.monitor.config.AgentCoreEngineConfiguration.MetricsExporterConfiguration;
import org.hawkular.agent.monitor.diagnostics.Diagnostics;
import org.hawkular.agent.monitor.diagnostics.DiagnosticsImpl;
import org.hawkular.agent.monitor.diagnostics.JBossLoggingReporter;
import org.hawkular.agent.monitor.diagnostics.JBossLoggingReporter.LoggingLevel;
import org.hawkular.agent.monitor.log.AgentLoggers;
import org.hawkular.agent.monitor.log.MsgLogger;
import org.hawkular.agent.monitor.prometheus.WebServer;
import org.hawkular.agent.monitor.protocol.ProtocolServices;
import org.hawkular.agent.monitor.protocol.dmr.ModelControllerClientFactory;
import org.hawkular.agent.monitor.protocol.platform.PlatformMBeanGenerator;
import org.hawkular.agent.monitor.storage.HawkularStorageAdapter;
import org.hawkular.agent.monitor.storage.HttpClientBuilder;
import org.hawkular.agent.monitor.storage.InventoryStorageProxy;
import org.hawkular.agent.monitor.storage.NotificationDispatcher;
import org.hawkular.agent.monitor.storage.StorageAdapter;
import org.hawkular.agent.monitor.util.Util;
import org.hawkular.bus.common.BasicMessage;
import org.jboss.logging.Logger;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
* The core engine of the Agent service.
*
* @author John Mazzitelli
* @author <a href="https://github.com/ppalaga">Peter Palaga</a>
*/
public abstract class AgentCoreEngine {
private static final MsgLogger log = AgentLoggers.getLogger(AgentCoreEngine.class);
private AtomicReference<ServiceStatus> agentServiceStatus = new AtomicReference<>(ServiceStatus.INITIAL);
// the agent configuration
private AgentCoreEngineConfiguration configuration;
// this is used to identify us to the Hawkular environment as a particular feed
private String feedId;
// used to report our own internal metrics
private Diagnostics diagnostics;
private ScheduledReporter diagnosticsReporter;
// used to send monitored data for storage
private StorageAdapter storageAdapter;
private HttpClientBuilder httpClientBuilder;
// used to send/receive data to the server over the feed communications channel
private WebSocketClientBuilder webSocketClientBuilder;
private FeedCommProcessor feedComm;
// used to send notifications to the server
private NotificationDispatcher notificationDispatcher;
// used to wrap platform resources with MBeans
private PlatformMBeanGenerator platformMBeanGenerator;
// proxies if exposed that will allow external apps to store their own inventory
private final InventoryStorageProxy inventoryStorageProxy = new InventoryStorageProxy();
// contains endpoint services for all the different protocols that are supported (dmr, jmx, platform)
private ProtocolServices protocolServices;
// Used to talk to the management interface of the WildFly server the agent is deployed in.
// Will be null if agent is not running within a WildFly server.
private ModelControllerClientFactory localModelControllerClientFactory;
// maps whose keys are security realm names and values are SSLContext's and TrustManager's
private Map<String, SSLContext> trustOnlySSLContextValues;
private Map<String, TrustManager[]> trustOnlyTrustManagersValues;
// the endpoint that emits metrics
private WebServer metricsExporter;
public AgentCoreEngine(AgentCoreEngineConfiguration configuration) {
this.configuration = configuration;
}
public AgentCoreEngineConfiguration getConfiguration() {
return this.configuration;
}
/**
* @return the context that can be used by others for storing ad-hoc monitoring data
*/
public HawkularAgentContext getHawkularAgentContext() {
return new HawkularAgentContextImpl(inventoryStorageProxy);
}
/**
* @return the status of the agent service. Will let you know if this service
* is {@link #startHawkularAgent() started} or {@link #stopHawkularAgent() stopped}.
*/
public ServiceStatus getStatus() {
synchronized (agentServiceStatus) {
return agentServiceStatus.get();
}
}
private void setStatus(ServiceStatus newStatus) {
synchronized (agentServiceStatus) {
agentServiceStatus.set(newStatus);
agentServiceStatus.notifyAll();
}
}
/**
* Starts this service. If the service is already started, this method is a no-op.
*/
public void startHawkularAgent() {
startHawkularAgent(null);
}
/**
* Starts this service. If the service is already started, this method is a no-op.
*
* @param newConfiguration if not null is used to build the runtime configuration. Use this to reflect
* changes in the persisted configuration (e.g. standalone.xml) since service creation.
*/
public void startHawkularAgent(AgentCoreEngineConfiguration newConfiguration) {
synchronized (agentServiceStatus) {
boolean processStatus = true;
while (processStatus) {
switch (agentServiceStatus.get()) {
case RUNNING: {
return; // we are already started
}
case STARTING: {
// Let our current thread simply wait for the agent to start since some other thread is starting this service.
// We abort if we find the agent in the STOPPED state since that means the startup failed for some reason.
log.infoAlreadyStarting();
while (agentServiceStatus.get() != ServiceStatus.RUNNING
&& agentServiceStatus.get() != ServiceStatus.STOPPED) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
return; // we are either running or the startup failed; either way, return
}
case STOPPING: {
// In the process of stopping; we want to restart but only after fully stopped.
// Once leaving the STOPPING state, we go back up and do what is appropriate for the new status.
log.infoAgentWillStartAfterStopping();
while (agentServiceStatus.get() == ServiceStatus.STOPPING) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
processStatus = true;
break;
}
case STOPPED:
case INITIAL: {
processStatus = false;
break; // this is the normal case - we are stopped and we are being asked to start now
}
}
}
// let's begin starting the agent now
setStatus(ServiceStatus.STARTING);
}
try {
log.infoVersion(Version.getVersionString());
log.infoStarting();
// Determine the configuration to use immediately.
// WARNING! Do not use any inventory metadata (e.g. metric types, resource types) from this
// configuration yet. We have not attempted to download the full configuration from the server.
// Until we do, we might have non-existent or out-of-date inventory metadata.
// But we need this configuration now for things like getting the server endpoint information
// so we can connect to the server in the first place (which is needed in order to download
// the rest of the configuration).
if (null != newConfiguration) {
this.configuration = newConfiguration;
}
// if the agent has been disabled, abort startup and return immediately
if (!this.configuration.getGlobalConfiguration().isSubsystemEnabled()) {
log.infoAgentDisabled();
setStatus(ServiceStatus.STOPPED);
return;
}
this.trustOnlySSLContextValues = buildTrustOnlySSLContextValues(this.configuration);
this.trustOnlyTrustManagersValues = buildTrustOnlyTrustManagersValues(this.configuration);
// If we are to talk to the hawkular server securely, get the storage adapter security realm
// details and use it in the web socket client builder.
SSLContext ssl = null;
X509TrustManager x509TrustManager = null;
String securityRealm = configuration.getStorageAdapter().getSecurityRealm();
if (securityRealm != null) {
ssl = trustOnlySSLContextValues.get(securityRealm);
TrustManager[] tms = trustOnlyTrustManagersValues.get(securityRealm);
if (tms != null) {
for (TrustManager tm : tms) {
if (tm instanceof X509TrustManager) {
x509TrustManager = (X509TrustManager) tm;
}
}
}
}
// prepare the builder that will create our HTTP/REST clients to the hawkular server infrastructure
this.httpClientBuilder = new HttpClientBuilder(this.configuration.getStorageAdapter(), ssl,
x509TrustManager);
// get our self identifiers
this.localModelControllerClientFactory = buildLocalModelControllerClientFactory();
if (this.configuration.getStorageAdapter().getFeedId() != null) {
this.feedId = this.configuration.getStorageAdapter().getFeedId();
} else {
this.feedId = autoGenerateFeedId();
}
log.infoAgentFeedId(this.feedId);
// Before we go on, we must make sure the Hawkular Server is up and ready
waitForHawkularServer();
// Now attempt to download the inventory metadata configuration from the server,
// overlaying it over the current configuration. Once this call completes, we will
// have our full configuration and can use even the inventory metadata configuration.
downloadAndOverlayConfiguration();
// build the diagnostics object that will be used to track our own performance
final MetricRegistry metricRegistry = new MetricRegistry();
this.diagnostics = new DiagnosticsImpl(configuration.getDiagnostics(), metricRegistry, feedId);
// try to connect to the server via command-gateway channel; keep going on error
try {
this.webSocketClientBuilder = new WebSocketClientBuilder(
this.configuration.getStorageAdapter(), ssl, x509TrustManager);
this.feedComm = new FeedCommProcessor(
this.webSocketClientBuilder,
buildAdditionalCommands(),
this.feedId,
this);
this.feedComm.connect();
} catch (Exception e) {
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
log.errorCannotEstablishFeedComm(e);
}
// start the storage adapter
try {
startStorageAdapter();
} catch (Exception e) {
log.errorCannotStartStorageAdapter(e);
throw new Exception("Agent cannot start storage adapter");
}
// now that we started the storage adapter, we can create our dispatcher
this.notificationDispatcher = new NotificationDispatcher(this.storageAdapter, this.feedId);
// this wraps the platform resources with MBeans so their metrics can be exposed via JMX
this.platformMBeanGenerator = new PlatformMBeanGenerator(this.feedId,
configuration.getPlatformConfiguration());
this.platformMBeanGenerator.registerAllMBeans();
// build the protocol services
ProtocolServices ps = createProtocolServicesBuilder()
.dmrProtocolService(this.localModelControllerClientFactory, configuration.getDmrConfiguration())
.jmxProtocolService(configuration.getJmxConfiguration())
.autoDiscoveryScanPeriodSecs(
configuration.getGlobalConfiguration().getAutoDiscoveryScanPeriodSeconds())
.build();
ps.addInventoryListener(inventoryStorageProxy);
if (notificationDispatcher != null) {
ps.addInventoryListener(notificationDispatcher);
}
protocolServices = ps;
// start all protocol services - this should perform the initial discovery scans
protocolServices.start();
// start the metrics exporter if enabled
try {
startMetricsExporter();
} catch (Exception e) {
log.errorf(e, "Cannot start metrics exporter - continuing but no metrics will be available");
}
setStatus(ServiceStatus.RUNNING);
} catch (Throwable t) {
if (t instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
log.errorFailedToStartAgent(t);
// artificially shutdown the agent - agent will be disabled now
stopHawkularAgent();
}
}
/**
* Stops this service. If the service is already stopped, this method is a no-op.
*/
public void stopHawkularAgent() {
synchronized (agentServiceStatus) {
if (agentServiceStatus.get() == ServiceStatus.STOPPED) {
log.infoStoppedAlready();
return; // we are already stopped
} else if (agentServiceStatus.get() == ServiceStatus.STOPPING) {
// some other thread is already stopping the agent - wait for that to finish and just return
while (agentServiceStatus.get() == ServiceStatus.STOPPING) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
return;
}
}
setStatus(ServiceStatus.STOPPING);
}
log.infoStopping();
AtomicReference<Throwable> error = new AtomicReference<>(null); // will hold the first error we encountered
try {
            // stop the metrics exporter endpoint
try {
if (metricsExporter != null) {
log.infoStopMetricsExporter();
metricsExporter.stop();
metricsExporter = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown metrics exporter but will continue shutdown", t);
}
// disconnect from the feed comm channel
try {
if (feedComm != null) {
feedComm.destroy();
feedComm = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown feed comm but will continue shutdown", t);
}
// stop our normal protocol services
try {
if (protocolServices != null) {
protocolServices.stop();
if (inventoryStorageProxy != null) {
protocolServices.removeInventoryListener(inventoryStorageProxy);
}
if (platformMBeanGenerator != null) {
platformMBeanGenerator.unregisterAllMBeans();
}
if (notificationDispatcher != null) {
protocolServices.removeInventoryListener(notificationDispatcher);
}
protocolServices = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown protocol services but will continue shutdown", t);
}
// now stop the storage adapter
try {
if (storageAdapter != null) {
storageAdapter.shutdown();
storageAdapter = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown storage adapter but will continue shutdown", t);
}
// stop diagnostic reporting and spit out a final diagnostics report
if (diagnosticsReporter != null) {
diagnosticsReporter.stop();
if (configuration.getDiagnostics().isEnabled()) {
diagnosticsReporter.report();
}
diagnosticsReporter = null;
}
// allow subclasses to cleanup
try {
cleanupDuringStop();
} catch (Exception e) {
error.compareAndSet(null, e);
log.debug("Cannot shutdown - subclass exception", e);
}
// We attempted to clean everything we could. If we hit an error, throw it to log our shutdown wasn't clean
if (error.get() != null) {
throw error.get();
}
} catch (Throwable t) {
log.warnFailedToStopAgent(t);
} finally {
setStatus(ServiceStatus.STOPPED);
}
}
/**
* Creates and starts the storage adapter that will be used to store our inventory data and monitoring data.
*
* @throws Exception if failed to start the storage adapter
*/
private void startStorageAdapter() throws Exception {
// create the storage adapter that will write our metrics/inventory data to backend storage on server
this.storageAdapter = new HawkularStorageAdapter();
this.storageAdapter.initialize(
feedId,
configuration.getStorageAdapter(),
diagnostics,
httpClientBuilder);
        // provide our storage adapter to the proxies - allows external apps to use them to store their own data
inventoryStorageProxy.setStorageAdapter(storageAdapter);
// log our own diagnostic reports
this.diagnosticsReporter = JBossLoggingReporter.forRegistry(this.diagnostics.getMetricRegistry())
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.outputTo(Logger.getLogger(getClass()))
.withLoggingLevel(LoggingLevel.DEBUG)
.build();
if (this.configuration.getDiagnostics().isEnabled()) {
diagnosticsReporter.start(this.configuration.getDiagnostics().getInterval(),
this.configuration.getDiagnostics().getTimeUnits());
}
}
private void startMetricsExporter() throws Exception {
MetricsExporterConfiguration meConfig = configuration.getMetricsExporterConfiguration();
if (meConfig.isEnabled()) {
String hostPort;
if (meConfig.getHost() != null) {
hostPort = String.format("%s:%d", meConfig.getHost(), meConfig.getPort());
} else {
hostPort = String.format("%d", meConfig.getPort());
}
File configFile = downloadMetricsExporterConfigFile();
if (configFile != null) {
String[] args = new String[] { hostPort, configFile.getAbsolutePath() };
log.infoStartMetricsExporter(args[0], args[1]);
metricsExporter = new WebServer();
metricsExporter.start(args);
} else {
log.infoMetricsExporterDisabled();
}
} else {
log.infoMetricsExporterDisabled();
}
}
private void waitForHawkularServer() throws Exception {
waitForHawkularInventory();
}
private void waitForHawkularInventory() throws Exception {
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String statusUrl = Util.getContextUrlString(configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext()).append("status").toString();
Request request = this.httpClientBuilder.buildJsonGetRequest(statusUrl, null);
int counter = 0;
while (true) {
Response response = null;
try {
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
if (response.code() != 401) {
log.debugf("Hawkular Inventory is not ready yet: %d/%s", response.code(), response.message());
} else {
log.warnBadHawkularCredentials(response.code(), response.message());
}
} else {
String bodyString = response.body().string();
if (checkStatusReallyUp(bodyString)) {
log.infof("Hawkular Inventory is ready: %s", bodyString);
break;
} else {
log.debugf("Hawkular Inventory is still starting: %s", bodyString);
}
}
} catch (Exception e) {
log.debugf("Hawkular Inventory is not ready yet: %s", e.toString());
} finally {
if (response != null) {
response.body().close();
}
}
Thread.sleep(5000L);
counter++;
if (counter % 12 == 0) {
log.warnConnectionDelayed(counter, "inventory", statusUrl);
}
}
}
private void downloadAndOverlayConfiguration() throws Exception {
// If we have no inventory metadata at all, we are required to download the config successfully;
// an exception is thrown if we cannot download and overlay the config.
        // If we already have some inventory metadata, then we will not abort with an exception
// on download/overlay failure - we'll just continue with the old inventory metadata.
boolean requireDownload = this.configuration.getDmrConfiguration().getTypeSets().isDisabledOrEmpty()
&& this.configuration.getJmxConfiguration().getTypeSets().isDisabledOrEmpty();
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String url = Util.getContextUrlString(
configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext())
.append("get-inventory-config")
.append("/")
.append(this.configuration.getGlobalConfiguration().getTypeVersion())
.toString();
Request request = this.httpClientBuilder.buildGetRequest(url, null);
Response response = null;
Exception error = null;
try {
log.debugf("Downloading inventory configuration from server: %s", url);
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
error = new Exception(String.format("Cannot download inventory configuration [%s]: %d/%s",
this.configuration.getGlobalConfiguration().getTypeVersion(),
response.code(),
response.message()));
} else {
this.configuration = overlayConfiguration(response.body().byteStream());
}
} catch (Exception e) {
error = new Exception(String.format("Failed to download and overlay inventory configuration [%s]",
this.configuration.getGlobalConfiguration().getTypeVersion()), e);
} finally {
if (response != null) {
response.body().close();
}
}
if (error != null) {
if (requireDownload) {
throw error;
} else {
                log.errorf(error, "%s. Will continue with the previous inventory configuration.", error.getMessage());
}
}
}
private File downloadMetricsExporterConfigFile() throws Exception {
MetricsExporterConfiguration meConfig = configuration.getMetricsExporterConfiguration();
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String url = Util.getContextUrlString(
configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext())
.append("get-jmx-exporter-config")
.append("/")
.append(meConfig.getConfigFile())
.toString();
Request request = this.httpClientBuilder.buildGetRequest(url, null);
Response response = null;
File configFileToWrite = null;
try {
log.debugf("Downloading jmx exporter configuration from server: %s", url);
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
log.errorf("Cannot download metrics exporter config file [%s]: %d/%s",
meConfig.getConfigFile(),
response.code(),
response.message());
} else {
String bodyString = response.body().string();
configFileToWrite = expectedMetricsExporterFile();
Util.write(bodyString, configFileToWrite);
}
} catch (Exception e) {
log.errorf(e, "Failed to download metrics exporter config file [%s]", meConfig.getConfigFile());
configFileToWrite = null;
} finally {
if (response != null) {
response.body().close();
}
}
if (configFileToWrite == null) {
// if we couldn't download the current version, attempt to use an old version if we have one already
File oldConfigFile = expectedMetricsExporterFile();
if (oldConfigFile.canRead()) {
log.warnf("Using existing metrics exporter config file at [%s]", oldConfigFile.getAbsolutePath());
configFileToWrite = oldConfigFile;
}
}
return configFileToWrite;
}
private File expectedMetricsExporterFile() {
String configFileName = configuration.getMetricsExporterConfiguration().getConfigFile();
if (!configFileName.endsWith("-jmx-exporter.yaml")) {
configFileName += "-jmx-exporter.yaml";
}
return new File(configuration.getMetricsExporterConfiguration().getConfigDir(), configFileName);
}
/**
     * If the server returns a 200 OK, we still need to check the content to see whether the server
* is really up. This is explained here: https://twitter.com/heiglandreas/status/801137903149654017
* @param bodyString String representation of the body
* @return true if it is really up, false otherwise (still starting).
*/
private boolean checkStatusReallyUp(String bodyString) {
Map<?, ?> result = null;
try {
result = new ObjectMapper().readValue(bodyString, Map.class);
} catch (Exception e) {
return false;
}
String status = (String) result.get("status");
return "UP".equals(status);
}
/**
* @return feed ID of the agent if the agent has started and the feed was registered; null otherwise
*/
public String getFeedId() {
return this.feedId;
}
/**
* @return a factory that can create clients which can talk to the local management interface
* of the app server we are running in. Will be null if agent is not running in a WildFly server.
*/
public ModelControllerClientFactory getLocalModelControllerClientFactory() {
return localModelControllerClientFactory;
}
/**
     * @return builder that lets you create protocol services and their endpoints
*/
public ProtocolServices.Builder createProtocolServicesBuilder() {
return ProtocolServices.builder(feedId, diagnostics, trustOnlySSLContextValues);
}
/**
* @return the current set of protocol services
*/
public ProtocolServices getProtocolServices() {
return protocolServices;
}
/**
* @return true if the agent is to be considered immutable and no config changes are allowed. This should
* also disallow operation execution on managed resources if those operations modify the remote resource.
*/
public boolean isImmutable() {
return this.configuration.getGlobalConfiguration().isImmutable();
}
/**
* Subclasses need to build the SSL contexts for all security realms it supports.
* @param config the agent configuration
* @return map of security realm names to SSL contexts
*/
protected abstract Map<String, SSLContext> buildTrustOnlySSLContextValues(AgentCoreEngineConfiguration config);
/**
* Subclasses need to build the trust managers for all security realms it supports.
* @param config the agent configuration
* @return map of security realm names to trust managers
*/
protected abstract Map<String, TrustManager[]> buildTrustOnlyTrustManagersValues(
AgentCoreEngineConfiguration config);
/**
* @return If the agent is running in a WildFly container, this should return
* a non-null client to the agent's own container. Null should be returned if the agent is
* not running within a WildFly controller or it cannot be determined if it is.
*/
protected abstract ModelControllerClientFactory buildLocalModelControllerClientFactory();
/**
     * This is called when the agent is starting up and has obtained a configuration file
* that needs to be overlaid on top of the current configuration.
*
* @param newConfig the stream containing the new overlay configuration file
* @return the new runtime configuration to be used by the running agent
*/
protected abstract AgentCoreEngineConfiguration overlayConfiguration(InputStream newConfig);
/**
* Subclasses are free to override if there are things that need to be done while shutting down.
*/
protected abstract void cleanupDuringStop();
/**
* When the configuration does not specify a feed ID, this method will be called to create one.
* It is best if this feed ID can be generated the same across agent restarts.
*
* @return the autogenerated feed ID
*
* @throws Exception if the feed ID cannot be generated
*/
protected abstract String autoGenerateFeedId() throws Exception;
/**
* If the agent can support additional command gateway commands (above and beyond the default
* ones that you get for free with the core agent engine), subclasses can return those
* command definitions here.
*
* @return additional command gateway commands that can be processed by the agent
*/
protected abstract Map<String, Class<? extends Command<? extends BasicMessage, ? extends BasicMessage>>> //
buildAdditionalCommands();
}
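// Editor's note (illustrative sketch, not part of the original file): a hypothetical minimal
// subclass showing the extension points declared by the abstract methods above. The class name,
// the empty-map/null return values and the hostname-based feed ID are assumptions for illustration.
//
// public class StandaloneAgentEngine extends AgentCoreEngine {
//     public StandaloneAgentEngine(AgentCoreEngineConfiguration config) { super(config); }
//     @Override protected Map<String, SSLContext> buildTrustOnlySSLContextValues(AgentCoreEngineConfiguration c) {
//         return java.util.Collections.emptyMap(); // no custom security realms in this sketch
//     }
//     @Override protected Map<String, TrustManager[]> buildTrustOnlyTrustManagersValues(AgentCoreEngineConfiguration c) {
//         return java.util.Collections.emptyMap();
//     }
//     @Override protected ModelControllerClientFactory buildLocalModelControllerClientFactory() {
//         return null; // not running inside a WildFly server
//     }
//     @Override protected AgentCoreEngineConfiguration overlayConfiguration(InputStream newConfig) {
//         return getConfiguration(); // sketch only: ignores the downloaded stream and keeps the current config
//     }
//     @Override protected void cleanupDuringStop() { }
//     @Override protected String autoGenerateFeedId() throws Exception {
//         return java.net.InetAddress.getLocalHost().getHostName(); // stable across restarts on the same host
//     }
//     @Override protected Map<String, Class<? extends Command<? extends BasicMessage, ? extends BasicMessage>>> buildAdditionalCommands() {
//         return java.util.Collections.emptyMap();
//     }
// }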
| hawkular-agent-core/src/main/java/org/hawkular/agent/monitor/service/AgentCoreEngine.java | /*
* Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.service;
import java.io.File;
import java.io.InputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.hawkular.agent.monitor.api.HawkularAgentContext;
import org.hawkular.agent.monitor.api.HawkularAgentContextImpl;
import org.hawkular.agent.monitor.cmd.Command;
import org.hawkular.agent.monitor.cmd.FeedCommProcessor;
import org.hawkular.agent.monitor.cmd.WebSocketClientBuilder;
import org.hawkular.agent.monitor.config.AgentCoreEngineConfiguration;
import org.hawkular.agent.monitor.config.AgentCoreEngineConfiguration.MetricsExporterConfiguration;
import org.hawkular.agent.monitor.diagnostics.Diagnostics;
import org.hawkular.agent.monitor.diagnostics.DiagnosticsImpl;
import org.hawkular.agent.monitor.diagnostics.JBossLoggingReporter;
import org.hawkular.agent.monitor.diagnostics.JBossLoggingReporter.LoggingLevel;
import org.hawkular.agent.monitor.log.AgentLoggers;
import org.hawkular.agent.monitor.log.MsgLogger;
import org.hawkular.agent.monitor.prometheus.WebServer;
import org.hawkular.agent.monitor.protocol.ProtocolServices;
import org.hawkular.agent.monitor.protocol.dmr.ModelControllerClientFactory;
import org.hawkular.agent.monitor.protocol.platform.PlatformMBeanGenerator;
import org.hawkular.agent.monitor.storage.HawkularStorageAdapter;
import org.hawkular.agent.monitor.storage.HttpClientBuilder;
import org.hawkular.agent.monitor.storage.InventoryStorageProxy;
import org.hawkular.agent.monitor.storage.NotificationDispatcher;
import org.hawkular.agent.monitor.storage.StorageAdapter;
import org.hawkular.agent.monitor.util.Util;
import org.hawkular.bus.common.BasicMessage;
import org.jboss.logging.Logger;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
* The core engine of the Agent service.
*
* @author John Mazzitelli
* @author <a href="https://github.com/ppalaga">Peter Palaga</a>
*/
public abstract class AgentCoreEngine {
private static final MsgLogger log = AgentLoggers.getLogger(AgentCoreEngine.class);
private AtomicReference<ServiceStatus> agentServiceStatus = new AtomicReference<>(ServiceStatus.INITIAL);
// the agent configuration
private AgentCoreEngineConfiguration configuration;
// this is used to identify us to the Hawkular environment as a particular feed
private String feedId;
// used to report our own internal metrics
private Diagnostics diagnostics;
private ScheduledReporter diagnosticsReporter;
// used to send monitored data for storage
private StorageAdapter storageAdapter;
private HttpClientBuilder httpClientBuilder;
// used to send/receive data to the server over the feed communications channel
private WebSocketClientBuilder webSocketClientBuilder;
private FeedCommProcessor feedComm;
// used to send notifications to the server
private NotificationDispatcher notificationDispatcher;
// used to wrap platform resources with MBeans
private PlatformMBeanGenerator platformMBeanGenerator;
// proxies if exposed that will allow external apps to store their own inventory
private final InventoryStorageProxy inventoryStorageProxy = new InventoryStorageProxy();
// contains endpoint services for all the different protocols that are supported (dmr, jmx, platform)
private ProtocolServices protocolServices;
// Used to talk to the management interface of the WildFly server the agent is deployed in.
// Will be null if agent is not running within a WildFly server.
private ModelControllerClientFactory localModelControllerClientFactory;
// maps whose keys are security realm names and values are SSLContext's and TrustManager's
private Map<String, SSLContext> trustOnlySSLContextValues;
private Map<String, TrustManager[]> trustOnlyTrustManagersValues;
// the endpoint that emits metrics
private WebServer metricsExporter;
public AgentCoreEngine(AgentCoreEngineConfiguration configuration) {
this.configuration = configuration;
}
public AgentCoreEngineConfiguration getConfiguration() {
return this.configuration;
}
/**
* @return the context that can be used by others for storing ad-hoc monitoring data
*/
public HawkularAgentContext getHawkularAgentContext() {
return new HawkularAgentContextImpl(inventoryStorageProxy);
}
/**
* @return the status of the agent service. Will let you know if this service
* is {@link #startHawkularAgent() started} or {@link #stopHawkularAgent() stopped}.
*/
public ServiceStatus getStatus() {
synchronized (agentServiceStatus) {
return agentServiceStatus.get();
}
}
private void setStatus(ServiceStatus newStatus) {
synchronized (agentServiceStatus) {
agentServiceStatus.set(newStatus);
agentServiceStatus.notifyAll();
}
}
/**
* Starts this service. If the service is already started, this method is a no-op.
*/
public void startHawkularAgent() {
startHawkularAgent(null);
}
/**
* Starts this service. If the service is already started, this method is a no-op.
*
* @param newConfiguration if not null is used to build the runtime configuration. Use this to reflect
* changes in the persisted configuration (e.g. standalone.xml) since service creation.
*/
public void startHawkularAgent(AgentCoreEngineConfiguration newConfiguration) {
synchronized (agentServiceStatus) {
boolean processStatus = true;
while (processStatus) {
switch (agentServiceStatus.get()) {
case RUNNING: {
return; // we are already started
}
case STARTING: {
// Let our current thread simply wait for the agent to start since some other thread is starting this service.
// We abort if we find the agent in the STOPPED state since that means the startup failed for some reason.
log.infoAlreadyStarting();
while (agentServiceStatus.get() != ServiceStatus.RUNNING
&& agentServiceStatus.get() != ServiceStatus.STOPPED) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
return; // we are either running or the startup failed; either way, return
}
case STOPPING: {
// In the process of stopping; we want to restart but only after fully stopped.
// Once leaving the STOPPING state, we go back up and do what is appropriate for the new status.
log.infoAgentWillStartAfterStopping();
while (agentServiceStatus.get() == ServiceStatus.STOPPING) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
processStatus = true;
break;
}
case STOPPED:
case INITIAL: {
processStatus = false;
break; // this is the normal case - we are stopped and we are being asked to start now
}
}
}
// let's begin starting the agent now
setStatus(ServiceStatus.STARTING);
}
try {
log.infoVersion(Version.getVersionString());
log.infoStarting();
// Determine the configuration to use immediately.
// WARNING! Do not use any inventory metadata (e.g. metric types, resource types) from this
// configuration yet. We have not attempted to download the full configuration from the server.
// Until we do, we might have non-existent or out-of-date inventory metadata.
// But we need this configuration now for things like getting the server endpoint information
// so we can connect to the server in the first place (which is needed in order to download
// the rest of the configuration).
if (null != newConfiguration) {
this.configuration = newConfiguration;
}
// if the agent has been disabled, abort startup and return immediately
if (!this.configuration.getGlobalConfiguration().isSubsystemEnabled()) {
log.infoAgentDisabled();
setStatus(ServiceStatus.STOPPED);
return;
}
this.trustOnlySSLContextValues = buildTrustOnlySSLContextValues(this.configuration);
this.trustOnlyTrustManagersValues = buildTrustOnlyTrustManagersValues(this.configuration);
// If we are to talk to the hawkular server securely, get the storage adapter security realm
// details and use it in the web socket client builder.
SSLContext ssl = null;
X509TrustManager x509TrustManager = null;
String securityRealm = configuration.getStorageAdapter().getSecurityRealm();
if (securityRealm != null) {
ssl = trustOnlySSLContextValues.get(securityRealm);
TrustManager[] tms = trustOnlyTrustManagersValues.get(securityRealm);
if (tms != null) {
for (TrustManager tm : tms) {
if (tm instanceof X509TrustManager) {
x509TrustManager = (X509TrustManager) tm;
}
}
}
}
// prepare the builder that will create our HTTP/REST clients to the hawkular server infrastructure
this.httpClientBuilder = new HttpClientBuilder(this.configuration.getStorageAdapter(), ssl,
x509TrustManager);
// get our self identifiers
this.localModelControllerClientFactory = buildLocalModelControllerClientFactory();
if (this.configuration.getStorageAdapter().getFeedId() != null) {
this.feedId = this.configuration.getStorageAdapter().getFeedId();
} else {
this.feedId = autoGenerateFeedId();
}
log.infoAgentFeedId(this.feedId);
// Before we go on, we must make sure the Hawkular Server is up and ready
waitForHawkularServer();
// Now attempt to download the inventory metadata configuration from the server,
// overlaying it over the current configuration. Once this call completes, we will
// have our full configuration and can use even the inventory metadata configuration.
downloadAndOverlayConfiguration();
// build the diagnostics object that will be used to track our own performance
final MetricRegistry metricRegistry = new MetricRegistry();
this.diagnostics = new DiagnosticsImpl(configuration.getDiagnostics(), metricRegistry, feedId);
// try to connect to the server via command-gateway channel; keep going on error
try {
this.webSocketClientBuilder = new WebSocketClientBuilder(
this.configuration.getStorageAdapter(), ssl, x509TrustManager);
this.feedComm = new FeedCommProcessor(
this.webSocketClientBuilder,
buildAdditionalCommands(),
this.feedId,
this);
this.feedComm.connect();
} catch (Exception e) {
if (e instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
log.errorCannotEstablishFeedComm(e);
}
// start the storage adapter
try {
startStorageAdapter();
} catch (Exception e) {
log.errorCannotStartStorageAdapter(e);
throw new Exception("Agent cannot start storage adapter");
}
// now that we started the storage adapter, we can create our dispatcher
this.notificationDispatcher = new NotificationDispatcher(this.storageAdapter, this.feedId);
// this wraps the platform resources with MBeans so their metrics can be exposed via JMX
this.platformMBeanGenerator = new PlatformMBeanGenerator(this.feedId,
configuration.getPlatformConfiguration());
this.platformMBeanGenerator.registerAllMBeans();
// build the protocol services
ProtocolServices ps = createProtocolServicesBuilder()
.dmrProtocolService(this.localModelControllerClientFactory, configuration.getDmrConfiguration())
.jmxProtocolService(configuration.getJmxConfiguration())
.autoDiscoveryScanPeriodSecs(
configuration.getGlobalConfiguration().getAutoDiscoveryScanPeriodSeconds())
.build();
ps.addInventoryListener(inventoryStorageProxy);
if (notificationDispatcher != null) {
ps.addInventoryListener(notificationDispatcher);
}
protocolServices = ps;
// start all protocol services - this should perform the initial discovery scans
protocolServices.start();
// start the metrics exporter if enabled
startMetricsExporter();
setStatus(ServiceStatus.RUNNING);
} catch (Throwable t) {
if (t instanceof InterruptedException) {
Thread.currentThread().interrupt();
}
log.errorFailedToStartAgent(t);
// artificially shutdown the agent - agent will be disabled now
stopHawkularAgent();
}
}
/**
* Stops this service. If the service is already stopped, this method is a no-op.
*/
public void stopHawkularAgent() {
synchronized (agentServiceStatus) {
if (agentServiceStatus.get() == ServiceStatus.STOPPED) {
log.infoStoppedAlready();
return; // we are already stopped
} else if (agentServiceStatus.get() == ServiceStatus.STOPPING) {
// some other thread is already stopping the agent - wait for that to finish and just return
while (agentServiceStatus.get() == ServiceStatus.STOPPING) {
try {
agentServiceStatus.wait(30000L);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
return;
}
}
setStatus(ServiceStatus.STOPPING);
}
log.infoStopping();
AtomicReference<Throwable> error = new AtomicReference<>(null); // will hold the first error we encountered
try {
            // stop the metrics exporter endpoint
try {
if (metricsExporter != null) {
log.infoStopMetricsExporter();
metricsExporter.stop();
metricsExporter = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown metrics exporter but will continue shutdown", t);
}
// disconnect from the feed comm channel
try {
if (feedComm != null) {
feedComm.destroy();
feedComm = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown feed comm but will continue shutdown", t);
}
// stop our normal protocol services
try {
if (protocolServices != null) {
protocolServices.stop();
if (inventoryStorageProxy != null) {
protocolServices.removeInventoryListener(inventoryStorageProxy);
}
if (platformMBeanGenerator != null) {
platformMBeanGenerator.unregisterAllMBeans();
}
if (notificationDispatcher != null) {
protocolServices.removeInventoryListener(notificationDispatcher);
}
protocolServices = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown protocol services but will continue shutdown", t);
}
// now stop the storage adapter
try {
if (storageAdapter != null) {
storageAdapter.shutdown();
storageAdapter = null;
}
} catch (Throwable t) {
error.compareAndSet(null, t);
log.debug("Cannot shutdown storage adapter but will continue shutdown", t);
}
// stop diagnostic reporting and spit out a final diagnostics report
if (diagnosticsReporter != null) {
diagnosticsReporter.stop();
if (configuration.getDiagnostics().isEnabled()) {
diagnosticsReporter.report();
}
diagnosticsReporter = null;
}
// allow subclasses to cleanup
try {
cleanupDuringStop();
} catch (Exception e) {
error.compareAndSet(null, e);
log.debug("Cannot shutdown - subclass exception", e);
}
// We attempted to clean everything we could. If we hit an error, throw it to log our shutdown wasn't clean
if (error.get() != null) {
throw error.get();
}
} catch (Throwable t) {
log.warnFailedToStopAgent(t);
} finally {
setStatus(ServiceStatus.STOPPED);
}
}
/**
* Creates and starts the storage adapter that will be used to store our inventory data and monitoring data.
*
* @throws Exception if failed to start the storage adapter
*/
private void startStorageAdapter() throws Exception {
// create the storage adapter that will write our metrics/inventory data to backend storage on server
this.storageAdapter = new HawkularStorageAdapter();
this.storageAdapter.initialize(
feedId,
configuration.getStorageAdapter(),
diagnostics,
httpClientBuilder);
// provide our storage adapter to the proxies - allows external apps to use them to store its own data
inventoryStorageProxy.setStorageAdapter(storageAdapter);
// log our own diagnostic reports
this.diagnosticsReporter = JBossLoggingReporter.forRegistry(this.diagnostics.getMetricRegistry())
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.outputTo(Logger.getLogger(getClass()))
.withLoggingLevel(LoggingLevel.DEBUG)
.build();
if (this.configuration.getDiagnostics().isEnabled()) {
diagnosticsReporter.start(this.configuration.getDiagnostics().getInterval(),
this.configuration.getDiagnostics().getTimeUnits());
}
}
private void startMetricsExporter() throws Exception {
MetricsExporterConfiguration meConfig = configuration.getMetricsExporterConfiguration();
if (meConfig.isEnabled()) {
String hostPort;
if (meConfig.getHost() != null) {
hostPort = String.format("%s:%d", meConfig.getHost(), meConfig.getPort());
} else {
hostPort = String.format("%d", meConfig.getPort());
}
File configFile = downloadMetricsExporterConfigFile();
if (configFile != null) {
String[] args = new String[] { hostPort, configFile.getAbsolutePath() };
log.infoStartMetricsExporter(args[0], args[1]);
metricsExporter = new WebServer();
metricsExporter.start(args);
} else {
log.infoMetricsExporterDisabled();
}
} else {
log.infoMetricsExporterDisabled();
}
}
private void waitForHawkularServer() throws Exception {
waitForHawkularInventory();
}
private void waitForHawkularInventory() throws Exception {
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String statusUrl = Util.getContextUrlString(configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext()).append("status").toString();
Request request = this.httpClientBuilder.buildJsonGetRequest(statusUrl, null);
int counter = 0;
while (true) {
Response response = null;
try {
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
if (response.code() != 401) {
log.debugf("Hawkular Inventory is not ready yet: %d/%s", response.code(), response.message());
} else {
log.warnBadHawkularCredentials(response.code(), response.message());
}
} else {
String bodyString = response.body().string();
if (checkStatusReallyUp(bodyString)) {
log.infof("Hawkular Inventory is ready: %s", bodyString);
break;
} else {
log.debugf("Hawkular Inventory is still starting: %s", bodyString);
}
}
} catch (Exception e) {
log.debugf("Hawkular Inventory is not ready yet: %s", e.toString());
} finally {
if (response != null) {
response.body().close();
}
}
Thread.sleep(5000L);
counter++;
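// 12 polls at 5 seconds each: warn roughly once a minute that inventory is still unreachable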
if (counter % 12 == 0) {
log.warnConnectionDelayed(counter, "inventory", statusUrl);
}
}
}
private void downloadAndOverlayConfiguration() throws Exception {
// If we have no inventory metadata at all, we are required to download the config successfully;
// an exception is thrown if we cannot download and overlay the config.
// If we already have some inventory metadata, then we will not abort with an exception
// on download/overlay failure - we'll just continue with the old inventory metadata.
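// Summarizing the rule above:
// - no local DMR/JMX type sets and the download fails -> the failure is rethrown below
// - type sets already present and the download fails -> the failure is only logged and the old configuration is kept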
boolean requireDownload = this.configuration.getDmrConfiguration().getTypeSets().isDisabledOrEmpty()
&& this.configuration.getJmxConfiguration().getTypeSets().isDisabledOrEmpty();
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String url = Util.getContextUrlString(
configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext())
.append("get-inventory-config")
.append("/")
.append(this.configuration.getGlobalConfiguration().getTypeVersion())
.toString();
Request request = this.httpClientBuilder.buildGetRequest(url, null);
Response response = null;
Exception error = null;
try {
log.debugf("Downloading inventory configuration from server: %s", url);
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
error = new Exception(String.format("Cannot download inventory configuration [%s]: %d/%s",
this.configuration.getGlobalConfiguration().getTypeVersion(),
response.code(),
response.message()));
} else {
this.configuration = overlayConfiguration(response.body().byteStream());
}
} catch (Exception e) {
error = new Exception(String.format("Failed to download and overlay inventory configuration [%s]",
this.configuration.getGlobalConfiguration().getTypeVersion()), e);
} finally {
if (response != null) {
response.body().close();
}
}
if (error != null) {
if (requireDownload) {
throw error;
} else {
log.errorf(error, "%s. Will continue with the previous inventory configuration.", error.getMessage());
}
}
}
private File downloadMetricsExporterConfigFile() throws Exception {
MetricsExporterConfiguration meConfig = configuration.getMetricsExporterConfiguration();
OkHttpClient httpclient = this.httpClientBuilder.getHttpClient();
String url = Util.getContextUrlString(
configuration.getStorageAdapter().getUrl(),
configuration.getStorageAdapter().getInventoryContext())
.append("get-jmx-exporter-config")
.append("/")
.append(meConfig.getConfigFile())
.toString();
Request request = this.httpClientBuilder.buildGetRequest(url, null);
Response response = null;
File configFileToWrite = null;
try {
log.debugf("Downloading jmx exporter configuration from server: %s", url);
response = httpclient.newCall(request).execute();
if (response.code() != 200) {
log.errorf("Cannot download metrics exporter config file [%s]: %d/%s",
meConfig.getConfigFile(),
response.code(),
response.message());
} else {
String bodyString = response.body().string();
configFileToWrite = expectedMetricsExporterFile();
Util.write(bodyString, configFileToWrite);
}
} catch (Exception e) {
log.errorf(e, "Failed to download metrics exporter config file [%s]", meConfig.getConfigFile());
configFileToWrite = null;
} finally {
if (response != null) {
response.body().close();
}
}
if (configFileToWrite == null) {
// if we couldn't download the current version, attempt to use an old version if we have one already
File oldConfigFile = expectedMetricsExporterFile();
if (oldConfigFile.canRead()) {
log.warnf("Using existing metrics exporter config file at [%s]", oldConfigFile.getAbsolutePath());
configFileToWrite = oldConfigFile;
}
}
return configFileToWrite;
}
private File expectedMetricsExporterFile() {
String configFileName = configuration.getMetricsExporterConfiguration().getConfigFile();
if (!configFileName.endsWith("-jmx-exporter.yaml")) {
configFileName += "-jmx-exporter.yaml";
}
return new File(configuration.getMetricsExporterConfiguration().getConfigDir(), configFileName);
}
/**
* If the server returns a 200 OK, we still need to check the content if the server
* is really up. This is explained here: https://twitter.com/heiglandreas/status/801137903149654017
* @param bodyString String representation of the body
* @return true if it is really up, false otherwise (still starting).
*/
private boolean checkStatusReallyUp(String bodyString) {
Map<?, ?> result = null;
try {
result = new ObjectMapper().readValue(bodyString, Map.class);
} catch (Exception e) {
return false;
}
String status = (String) result.get("status");
return "UP".equals(status);
}
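// Example bodies for the check above (illustrative - the payload shape is an assumption; the code
// only requires a JSON object with a top-level "status" field):
// {"status":"UP"} -> checkStatusReallyUp() returns true and waitForHawkularInventory() stops polling
// any other status value, or a body that is not valid JSON -> returns false and polling continues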
/**
* @return feed ID of the agent if the agent has started and the feed was registered; null otherwise
*/
public String getFeedId() {
return this.feedId;
}
/**
* @return a factory that can create clients which can talk to the local management interface
* of the app server we are running in. Will be null if the agent is not running in a WildFly server.
*/
public ModelControllerClientFactory getLocalModelControllerClientFactory() {
return localModelControllerClientFactory;
}
/**
* @return builder that lets you create protocol services and their endpoints
*/
public ProtocolServices.Builder createProtocolServicesBuilder() {
return ProtocolServices.builder(feedId, diagnostics, trustOnlySSLContextValues);
}
/**
* @return the current set of protocol services
*/
public ProtocolServices getProtocolServices() {
return protocolServices;
}
/**
* @return true if the agent is to be considered immutable and no config changes are allowed. This should
* also disallow operation execution on managed resources if those operations modify the remote resource.
*/
public boolean isImmutable() {
return this.configuration.getGlobalConfiguration().isImmutable();
}
/**
* Subclasses need to build the SSL contexts for all security realms they support.
* @param config the agent configuration
* @return map of security realm names to SSL contexts
*/
protected abstract Map<String, SSLContext> buildTrustOnlySSLContextValues(AgentCoreEngineConfiguration config);
/**
* Subclasses need to build the trust managers for all security realms they support.
* @param config the agent configuration
* @return map of security realm names to trust managers
*/
protected abstract Map<String, TrustManager[]> buildTrustOnlyTrustManagersValues(
AgentCoreEngineConfiguration config);
/**
* @return If the agent is running in a WildFly container, this should return
* a non-null client factory for the agent's own container. Null should be returned if the agent is
* not running within a WildFly container or it cannot be determined whether it is.
*/
protected abstract ModelControllerClientFactory buildLocalModelControllerClientFactory();
/**
* This is called when the agent, while starting up, has obtained a configuration file
* that needs to be overlaid on top of the current configuration.
*
* @param newConfig the stream containing the new overlay configuration file
* @return the new runtime configuration to be used by the running agent
*/
protected abstract AgentCoreEngineConfiguration overlayConfiguration(InputStream newConfig);
/**
* Subclasses implement this to perform any cleanup needed while shutting down (the implementation may be a no-op).
*/
protected abstract void cleanupDuringStop();
/**
* When the configuration does not specify a feed ID, this method will be called to create one.
* It is best if this feed ID can be generated consistently across agent restarts.
*
* @return the autogenerated feed ID
*
* @throws Exception if the feed ID cannot be generated
*/
protected abstract String autoGenerateFeedId() throws Exception;
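// A minimal sketch of a stable feed ID (illustrative only - the real implementation is supplied
// by the concrete subclass and may use a completely different scheme):
//
// protected String autoGenerateFeedId() throws Exception {
//     // the canonical host name is normally stable across restarts on the same machine
//     return java.net.InetAddress.getLocalHost().getCanonicalHostName();
// }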
/**
* If the agent can support additional command gateway commands (above and beyond the default
* ones that you get for free with the core agent engine), subclasses can return those
* command definitions here.
*
* @return additional command gateway commands that can be processed by the agent
*/
protected abstract Map<String, Class<? extends Command<? extends BasicMessage, ? extends BasicMessage>>> //
buildAdditionalCommands();
}
| do not abort agent startup if the metrics endpoint fails - let the agent keep going but log an ugly error message (#424)
| hawkular-agent-core/src/main/java/org/hawkular/agent/monitor/service/AgentCoreEngine.java | do not abort agent startup if the metrics endpoint fails - let the agent keep going but log an ugly error message (#424) | <ide><path>awkular-agent-core/src/main/java/org/hawkular/agent/monitor/service/AgentCoreEngine.java
<ide> protocolServices.start();
<ide>
<ide> // start the metrics exporter if enabled
<del> startMetricsExporter();
<add> try {
<add> startMetricsExporter();
<add> } catch (Exception e) {
<add> log.errorf(e, "Cannot start metrics exporter - continuing but no metrics will be available");
<add> }
<ide>
<ide> setStatus(ServiceStatus.RUNNING);
<ide> |
|
Java | epl-1.0 | ae048ec9861a3cabca435401df10f0acfde1f8b8 | 0 | xen-0/dawnsci,DawnScience/dawnsci,belkassaby/dawnsci,eclipse/dawnsci,jamesmudd/dawnsci,jamesmudd/dawnsci,jamesmudd/dawnsci | /*-
* Copyright (c) 2012 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.dataset;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.complex.Complex;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.dataset.impl.ByteDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.ComplexDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundShortDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Dataset;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetFactory;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetUtils;
import org.eclipse.dawnsci.analysis.dataset.impl.DoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.FloatDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.IndexIterator;
import org.eclipse.dawnsci.analysis.dataset.impl.IntegerDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Maths;
import org.eclipse.dawnsci.analysis.dataset.impl.Random;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
public class MathsTest {
private final static int SSTEP = 15;
private final static int SITER = 3;
private final static double ABSERRD = 1e-8;
private final static double ABSERRF = 1e-5;
private final static double RELERR = 1e-5;
private final static int ISIZEA = 2;
private final static int ISIZEB = 3;
private final static int MAXISIZE = Math.max(ISIZEA, ISIZEB);
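// Each test starts with 32-element datasets and repeats SITER times, multiplying the size by
// SSTEP per iteration; ISIZEA/ISIZEB are the item sizes used when building compound datasets.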
@Before
public void setUpClass() {
classes = new LinkedHashMap<String, Integer>();
// classes.put("Boolean", Dataset.BOOL);
classes.put("Byte", Dataset.INT8);
classes.put("Short", Dataset.INT16);
classes.put("Integer", Dataset.INT32);
classes.put("Long", Dataset.INT64);
classes.put("Float", Dataset.FLOAT32);
classes.put("Double", Dataset.FLOAT64);
classes.put("ComplexF", Dataset.COMPLEX64);
classes.put("ComplexD", Dataset.COMPLEX128);
classes.put("ArrayB", Dataset.ARRAYINT8);
classes.put("ArrayS", Dataset.ARRAYINT16);
classes.put("ArrayI", Dataset.ARRAYINT32);
classes.put("ArrayL", Dataset.ARRAYINT64);
classes.put("ArrayF", Dataset.ARRAYFLOAT32);
classes.put("ArrayD", Dataset.ARRAYFLOAT64);
TestUtils.verboseOutput = true;
}
@After
public void closeDown() {
TestUtils.verboseOutput = false;
}
private Map<String, Integer> classes;
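// Compares a computed dataset (c) against a reference dataset (d) built in-line by the test:
// dtype, size, item size and shape must match exactly, and every element must agree within
// max(abserr, |value| * RELERR), where abserr is ABSERRF for single-precision types and ABSERRD otherwise.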
private void checkDatasets(Object a, Object b, Dataset c, Dataset d) {
Assert.assertNotNull(c);
Assert.assertNotNull(d);
Assert.assertEquals("Dtype does not match", c.getDtype(), d.getDtype());
Assert.assertEquals("Size does not match", c.getSize(), d.getSize());
Assert.assertEquals("ISize does not match", c.getElementsPerItem(), d.getElementsPerItem());
Assert.assertArrayEquals("Shape does not match", c.getShape(), d.getShape());
final IndexIterator ci = c.getIterator(true);
final IndexIterator di = d.getIterator();
final int is = c.getElementsPerItem();
final double abserr = (c.getDtype() == Dataset.FLOAT32 ||
c.getDtype() == Dataset.COMPLEX64 ||
c.getDtype() == Dataset.ARRAYFLOAT32) ? ABSERRF : ABSERRD;
if (is == 1) {
while (ci.hasNext() && di.hasNext()) {
double av = c.getElementDoubleAbs(ci.index);
double bv = d.getElementDoubleAbs(di.index);
double tol = Math.max(abserr, Math.abs(av*RELERR));
if (Math.abs(av - bv) > tol) {
if (a != null) {
if (a instanceof Dataset)
System.err.printf("A was %s ", ((Dataset) a).getString(ci.getPos()));
else
System.err.printf("A was %s ", a);
}
if (b != null) {
if (b instanceof Dataset)
System.err.printf("B was %s ", ((Dataset) b).getString(ci.getPos()));
else
System.err.printf("B was %s ", b);
}
System.err.printf("at %s\n", Arrays.toString(ci.getPos()));
}
Assert.assertEquals("Value does not match at " + Arrays.toString(ci.getPos()) + ", with tol " + tol + ": ",
av, bv, tol);
}
} else {
while (ci.hasNext() && di.hasNext()) {
for (int j = 0; j < is; j++) {
double av = c.getElementDoubleAbs(ci.index + j);
double bv = d.getElementDoubleAbs(di.index + j);
double tol = Math.max(abserr, Math.abs(av*RELERR));
if (Math.abs(av - bv) > tol) {
if (a != null) {
if (a instanceof Dataset)
System.err.printf("A was %s ", ((Dataset) a).getString(ci.getPos()));
else
System.err.printf("A was %s ", a);
}
if (b != null) {
if (b instanceof Dataset)
System.err.printf("B was %s ", ((Dataset) b).getString(ci.getPos()));
else
System.err.printf("B was %s ", b);
}
System.err.printf("at %s\n", Arrays.toString(ci.getPos()));
}
Assert.assertEquals("Value does not match at " + Arrays.toString(ci.getPos()) + "; " + j +
", with tol " + tol + ": ", av, bv, tol);
}
}
}
}
@Test
public void testAddition() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s to %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.add(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).add((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
+ ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da + b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) + db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
+ b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant to %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.add(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() + dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) + dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
@Test
public void testSubtraction() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s to %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).subtract((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
- ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da - b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) - db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
- b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by sub for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant from %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() - dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) - dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s from constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.subtract((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv - ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv - a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by sub for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
@Test
public void testMultiplication() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.multiply(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply(b.getElementDoubleAbs(itb.index)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).multiply((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
* ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da * b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) * db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
* b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by mul for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant with %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.multiply(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() * dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) * dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by mul for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
@Ignore
@Test
public void testDivision() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.divide(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).divide((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
Complex z = (Complex) a.getObjectAbs(ita.index);
double br = b.getElementDoubleAbs(itb.index);
Complex zr = z.divide(br);
if (br == 0) { // CM's implementation is different to NumPy's
zr = new Complex(z.getReal() != 0 ? z.getReal() / br : zr.getReal(),
z.getImaginary() != 0 ? z.getImaginary() / br : zr.getImaginary());
}
d.setObjectAbs(j, zr);
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).divide((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
if (d.hasFloatingPointElements()) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
/ ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
while (ita.hasNext() && itb.hasNext()) {
double bv = ((Number) b.getObjectAbs(itb.index)).doubleValue();
d.setObjectAbs(j++, bv == 0 ? 0 :((Number) a.getObjectAbs(ita.index)).doubleValue()
/ bv);
}
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
if (d.hasFloatingPointElements()) {
for (int k = 0; k < ISIZEB; k++) {
answer[k] = xa / b.getElementDoubleAbs(itb.index + k);
}
} else {
for (int k = 0; k < ISIZEB; k++) {
final double v = xa / b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
if (d.hasFloatingPointElements()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / xb;
}
} else {
if (xb == 0) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = 0;
}
} else {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / xb;
}
}
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
if (d.hasFloatingPointElements()) {
double v;
for (int k = 0; k < is; k++) {
v = a.getElementDoubleAbs(ita.index + k)
/ b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
} else {
double v;
for (int k = 0; k < is; k++) {
v = a.getElementDoubleAbs(ita.index + k)
/ b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.divide(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).divide(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() / dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.divide(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.divide((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv / ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv / a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
@Ignore
@Test
public void testRemainder() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
% ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = xa % b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % xb;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() % dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv % ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv % a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
@Test
public void testPower() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.power(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).pow((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, Math.pow(a.getElementDoubleAbs(ita.index),
b.getElementDoubleAbs(itb.index)));
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = Math.pow(xa, b.getElementDoubleAbs(itb.index + k));
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k), xb);
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k),
b.getElementDoubleAbs(itb.index + k));
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.power(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, Math.pow(((Number) a.getObjectAbs(ita.index)).doubleValue(), dv));
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k), dv);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.power(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.pow((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, Math.pow(dv, ((Number) a.getObjectAbs(ita.index)).doubleValue()));
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(dv, a.getElementDoubleAbs(ita.index + k));
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
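// Maths.difference(a, n, axis) computes the n-th order discrete difference along the given axis
// (axis -1 = last), so the first-order difference of {0, 1, 3, 9, 5, 10} is {1, 2, 6, -4, 5}.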
@Test
public void testDifference() {
int[] data = {0,1,3,9,5,10};
Dataset a = new IntegerDataset(data, null);
Dataset d = Maths.difference(a, 1, -1);
int[] tdata;
tdata = new int[] {1, 2, 6, -4, 5};
Dataset ta = new IntegerDataset(tdata, null);
checkDatasets(null, null, d, ta);
Slice[] slices = new Slice[] {new Slice(3)};
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
slices = new Slice[] {new Slice(-2, null, -1)};
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new ComplexDoubleDataset(new double[] {0, 1, 2, 3, 4, 5});
d = Maths.difference(a, 1, -1);
ta = new ComplexDoubleDataset(new double[] {2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5});
d = Maths.difference(a, 1, -1);
ta = new CompoundDoubleDataset(2, new double[] {2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new ByteDataset(new byte[] {0, 1, 2, 4, 7, 11});
d = Maths.difference(a, 2, -1);
ta = new ByteDataset(new byte[] {0, 1, 1, 1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
a = new CompoundShortDataset(2, new short[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.difference(a, 2, -1);
ta = new CompoundShortDataset(2, new short[] {0, 0, 1, -1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.difference(a, 2, -1);
ta = new CompoundDoubleDataset(2, new double[] {0, 0, 1, -1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
}
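// Maths.gradient(a, x...) estimates the derivative (analogous to numpy.gradient): one-sided
// differences at the ends and central differences in the interior, e.g. the gradient of
// {1, 2, 4, 7, 11, 16} is {1, 1.5, 2.5, 3.5, 4.5, 5}.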
@Test
public void testGradient() {
double[] data = {1, 2, 4, 7, 11, 16};
double[] tdata;
Dataset a = new DoubleDataset(data, null);
Dataset d = Maths.gradient(a).get(0);
tdata = new double[] {1., 1.5, 2.5, 3.5, 4.5, 5.};
Dataset ta = new DoubleDataset(tdata, null);
checkDatasets(null, null, d, ta);
Slice[] slices = new Slice[] {new Slice(3)};
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
Dataset b = DatasetFactory.createRange(a.getShape()[0], a.getDtype());
b.imultiply(2);
tdata = new double[] {0.5 , 0.75, 1.25, 1.75, 2.25, 2.5};
ta = new DoubleDataset(tdata, null);
d = Maths.gradient(a, b).get(0);
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices), b.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices), b.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
data = new double[] {1, 2, 6, 3, 4, 5};
a = new DoubleDataset(data, 2, 3);
List<? extends Dataset> l = Maths.gradient(a);
tdata = new double[] { 2., 2., -1., 2., 2., -1.};
ta = new DoubleDataset(tdata, 2, 3);
checkDatasets(null, null, l.get(0), ta);
tdata = new double[] { 1., 2.5, 4., 1., 1., 1.};
ta = new DoubleDataset(tdata, 2, 3);
checkDatasets(null, null, l.get(1), ta);
b = DatasetFactory.createRange(a.getShape()[0], a.getDtype());
b.imultiply(2);
Dataset c = DatasetFactory.createRange(a.getShape()[1], a.getDtype());
c.imultiply(-1.5);
l = Maths.gradient(a, b, c);
tdata = new double[] { 2., 2., -1., 2., 2., -1.};
ta = new DoubleDataset(tdata, 2, 3);
ta.idivide(2);
checkDatasets(null, null, l.get(0), ta);
tdata = new double[] { 1., 2.5, 4., 1., 1., 1.};
ta = new DoubleDataset(tdata, 2, 3);
ta.idivide(-1.5);
checkDatasets(null, null, l.get(1), ta);
a = new ByteDataset(new byte[] {0, 1, 2, 4, 7, 11});
d = Maths.gradient(a).get(0);
ta = new ByteDataset(new byte[] {1, 1, 1, 2, 3, 4});
checkDatasets(null, null, d, ta);
slices = new Slice[] {new Slice(-2, null, -1)};
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new ComplexDoubleDataset(new double[] {0, 1, 2, 3, 4, 5});
d = Maths.gradient(a).get(0);
ta = new ComplexDoubleDataset(new double[] {2, 2, 2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new CompoundShortDataset(2, new short[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.gradient(a).get(0);
ta = new CompoundShortDataset(2, new short[] {2, 2, 2, 2, 2, 1, 3, 1});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.gradient(a).get(0);
ta = new CompoundDoubleDataset(2, new double[] {2, 2, 2, 2, 2.5, 1.5, 3, 1});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
}
/**
* Test rounding
*/
@Test
public void testRounding() {
DoubleDataset t;
DoubleDataset x;
double tol = 1e-6;
double[] val = { -1.7, -1.5, -1.2, 0.3, 1.4, 1.5, 1.6 };
t = new DoubleDataset(val);
double[] resFloor = { -2, -2, -2, 0, 1, 1, 1 };
x = (DoubleDataset) Maths.floor(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resFloor[i], x.get(i), tol);
}
double[] resCeil = { -1, -1, -1, 1, 2, 2, 2 };
x = (DoubleDataset) Maths.ceil(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resCeil[i], x.get(i), tol);
}
double[] resRint= { -2, -2, -1, 0, 1, 2, 2 };
x = (DoubleDataset) Maths.rint(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resRint[i], x.get(i), tol);
}
}
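/**
 * Check 1D linear interpolation of dataset a at position x against a manually computed
 * value; positions outside the dataset interpolate towards zero (zero padding)
 */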
private void checkInterpolate(Dataset a, double x) {
int s = a.getShapeRef()[0];
// double v = Maths.interpolate(a, x);
double v = Maths.interpolate(a, new double[] {x});
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0) {
f2 = a.getDouble(0);
} else if (x >= s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
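/**
 * As checkInterpolate but exercises the Maths.interpolate(x, y, x0, left, right)
 * overload with null fill values
 */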
private void checkInterpolate2(Dataset a, double x) {
int s = a.getShapeRef()[0];
Dataset dv = Maths.interpolate(DatasetFactory.createRange(s, Dataset.INT32), a, DatasetFactory.createFromObject(x), null, null);
double v = dv.getElementDoubleAbs(0);
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0) {
f2 = a.getDouble(0);
} else if (x >= s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
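/**
 * As checkInterpolate2 but with explicit left and right values of 0, so positions
 * outside the abscissa range are expected to give 0
 */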
private void checkInterpolate3(Dataset a, double x) {
int s = a.getShapeRef()[0];
Dataset dv = Maths.interpolate(DatasetFactory.createRange(s, Dataset.INT32), a, DatasetFactory.createFromObject(x), 0, 0);
double v = dv.getElementDoubleAbs(0);
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0 || x > s - 1) {
} else if (x == s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
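/**
 * Check element-wise linear interpolation of a compound dataset at position x against a
 * manually computed array
 */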
private void checkInterpolateArray(CompoundDataset a, double x) {
int s = a.getShapeRef()[0];
int is = a.getElementsPerItem();
double[] v = new double[is];
Maths.interpolate(v, a, x);
int i = (int) Math.floor(x);
double[] e = new double[is];
double[] f1 = new double[is];
double[] f2 = new double[is];
if (x <= -1 || x >= s) {
} else if (x < 0) {
a.getDoubleArray(f2, 0);
} else if (x >= s - 1) {
a.getDoubleArray(f1, s - 1);
} else {
a.getDoubleArray(f1, i);
a.getDoubleArray(f2, i + 1);
}
double t = x - i;
for (int j = 0; j < is; j++)
e[j] = (1 - t) * f1[j] + t * f2[j];
Assert.assertArrayEquals(e, v, 1e-15);
}
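/**
 * Check bilinear interpolation of a 2D dataset at (x, y) against a manually computed
 * value, including the overload that takes a second (all-ones) dataset and must return
 * the same result
 */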
private void checkInterpolate(Dataset a, double x, double y) {
int s0 = a.getShapeRef()[0];
int s1 = a.getShapeRef()[1];
// double v = Maths.interpolate(a, x, y);
double v = Maths.interpolate(a, new double[] {x, y});
if (x <= -1 || x >= s0 || y <= -1 || y >= s1) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
int j = (int) Math.floor(y);
double t1 = x - i;
double t2 = y - j;
double f1 = 0, f2 = 0, f3 = 0, f4 = 0;
if (y < 0) {
if (x < 0) {
f4 = a.getDouble(0, 0);
} else if (x >= s0 - 1) {
f3 = a.getDouble(s0 - 1, 0);
} else {
f3 = a.getDouble(i, 0);
f4 = a.getDouble(i + 1, 0);
}
} else if (y >= s1 - 1) {
if (x < 0) {
f2 = a.getDouble(0, s1 - 1);
} else if (x >= s0 - 1) {
f1 = a.getDouble(s0 - 1, s1 - 1);
} else {
f1 = a.getDouble(i, s1 - 1);
f2 = a.getDouble(i + 1, s1 -1);
}
} else {
if (x < 0) {
f2 = a.getDouble(0, j);
f4 = a.getDouble(0, j + 1);
} else if (x >= s0 - 1) {
f1 = a.getDouble(s0 - 1, j);
f3 = a.getDouble(s0 - 1, j + 1);
} else {
f1 = a.getDouble(i, j);
f2 = a.getDouble(i + 1, j);
f3 = a.getDouble(i, j + 1);
f4 = a.getDouble(i + 1, j + 1);
}
}
double r = (1 - t1) * (1 - t2) * f1 + t1 * (1 - t2) * f2 + (1 - t1) * t2 * f3 + t1 * t2 * f4;
Assert.assertEquals(r, v, 1e-15);
v = Maths.interpolate(a, DatasetFactory.ones(a), x, y);
Assert.assertEquals(r, v, 1e-15);
}
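/**
 * Check element-wise bilinear interpolation of a 2D compound dataset at (x, y) against a
 * manually computed array
 */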
private void checkInterpolateArray(CompoundDataset a, double x, double y) {
int s0 = a.getShapeRef()[0];
int s1 = a.getShapeRef()[1];
int is = a.getElementsPerItem();
double[] v = new double[is];
Maths.interpolate(v, a, x, y);
if (x <= -1 || x >= s0 || y <= -1 || y >= s1) {
Assert.assertArrayEquals(new double[is], v, 1e-15);
return;
}
double[] f1 = new double[is];
double[] f2 = new double[is];
double[] f3 = new double[is];
double[] f4 = new double[is];
int i = (int) Math.floor(x);
int j = (int) Math.floor(y);
double t1 = x - i;
double t2 = y - j;
if (y < 0) {
if (x < 0) {
a.getDoubleArray(f4, 0, 0);
} else if (x >= s0 - 1) {
a.getDoubleArray(f3, s0 - 1, 0);
} else {
a.getDoubleArray(f3, i, 0);
a.getDoubleArray(f4, i + 1, 0);
}
} else if (y >= s1 - 1) {
if (x < 0) {
a.getDoubleArray(f2, 0, s1 - 1);
} else if (x >= s0 - 1) {
a.getDoubleArray(f1, s0 - 1, s1 - 1);
} else {
a.getDoubleArray(f1, i, s1 - 1);
a.getDoubleArray(f2, i + 1, s1 -1);
}
} else {
if (x < 0) {
a.getDoubleArray(f2, 0, j);
a.getDoubleArray(f4, 0, j + 1);
} else if (x >= s0 - 1) {
a.getDoubleArray(f1, s0 - 1, j);
a.getDoubleArray(f3, s0 - 1, j + 1);
} else {
a.getDoubleArray(f1, i, j);
a.getDoubleArray(f2, i + 1, j);
a.getDoubleArray(f3, i, j + 1);
a.getDoubleArray(f4, i + 1, j + 1);
}
}
for (j = 0; j < is; j++) {
f1[j] = (1 - t1) * (1 - t2) * f1[j] + t1 * (1 - t2) * f2[j] + (1 - t1) * t2 * f3[j] + t1 * t2 * f4[j];
}
Assert.assertArrayEquals(f1, v, 1e-15);
}
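/**
 * Test linear and bilinear interpolation by running the check helpers over in-range and
 * out-of-range positions for simple and compound datasets
 */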
@Test
public void testLinearInterpolation() {
Dataset xa = DatasetFactory.createRange(60, Dataset.INT32);
xa.iadd(1);
double[] xc = {-1.25, -1, -0.25, 0, 0.25, 58.25, 59, 59.25, 60, 60.25};
for (double x : xc) {
// System.out.printf("%g\n", x);
checkInterpolate(xa, x);
checkInterpolate2(xa, x);
checkInterpolate3(xa, x);
}
Dataset xb = DatasetFactory.createRange(120, Dataset.INT32);
xb.setShape(60, 2);
xb.ifloorDivide(2);
xb = DatasetUtils.createCompoundDatasetFromLastAxis(xb, true);
for (double x : xc) {
checkInterpolate(xb, x);
checkInterpolate2(xb, x);
checkInterpolate3(xb, x);
}
AbstractDatasetTest.checkDatasets(Maths.interpolate(DatasetFactory.createFromObject(new double[] {1, 2, 3}), DatasetFactory.createFromObject(new double[] {3, 2, 0}), DatasetFactory.createFromObject(new double[] {0, 1, 1.5, 2.72, 3.14}), 3, 0), DatasetFactory.createFromObject(new double[] {3. , 3. , 2.5 , 0.56, 0.}));
CompoundDataset cxb = (CompoundDataset) xb;
for (double x : xc) {
checkInterpolateArray(cxb, x);
}
xa.setShape(6, 10);
xc = new double[] {-1.25, -1, -0.25, 0, 0.25, 5.25, 6, 6.25, 7};
double[] yc = {-1.25, -1, -0.25, 0, 0.25, 8.25, 9, 9.25, 10, 10.25};
for (double x : xc) {
for (double y : yc) {
// System.out.printf("%g %g\n", x, y);
checkInterpolate(xa, x, y);
}
}
cxb.setShape(6, 10);
// xc = new double[] {-0.25, 0, 0.25, 5.25, 6, 6.25, 7};
// yc = new double[] {9.25, 10, 10.25};
for (double x : xc) {
for (double y : yc) {
// System.out.printf("%g %g\n", x, y);
checkInterpolateArray(cxb, x, y);
}
}
}
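/**
 * Test bitwise and, or, xor, invert and the left, right and unsigned right shift
 * operations on byte datasets
 */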
@Test
public void testBitwise() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
Dataset xb = DatasetFactory.createRange(8, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 1, 2, 3, 0, 1, 2, 3}),
Maths.bitwiseAnd(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -3, -2, -1, 4, 5, 6, 7}),
Maths.bitwiseOr(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -4, -4, -4, 4, 4, 4, 4}),
Maths.bitwiseXor(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {3, 2, 1, 0, -1, -2, -3, -4}),
Maths.bitwiseInvert(xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-1, -2, -3, -4, -5, -6, -7, -8}),
Maths.bitwiseInvert(xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -6, -8, -8, 0, 32, -128, -128}),
Maths.leftShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 10, 24, 56}),
Maths.leftShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 0, 2, 8, 24}),
Maths.leftShift(xa, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -2, -1, -1, 0, 0, 0, 0}),
Maths.rightShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 2, 1, 0}),
Maths.rightShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-1, -1, -1, -1, 0, 0, 0, 0}),
Maths.rightShift(xa, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, 126, 63, 31, 0, 0, 0, 0}),
Maths.unsignedRightShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 2, 1, 0}),
Maths.unsignedRightShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 0, 0, 0, 0}),
Maths.unsignedRightShift(xa, xa), ABSERRD, ABSERRD);
}
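/**
 * Test divideTowardsFloor: integer results are rounded towards negative infinity while
 * floating-point results keep the exact quotient
 */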
@Test
public void testDivideTowardsFloor() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-2, -2, -1, -1, 0, 0, 1, 1}),
Maths.divideTowardsFloor(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {2, 1, 1, 0, 0, -1, -1, -2}),
Maths.divideTowardsFloor(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {-1.6, -1.2, -0.8, -0.4, 0, 0.4, 0.8, 1.2}),
Maths.divideTowardsFloor(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {1.6f, 1.2f, 0.8f, 0.4f, 0 , -0.4f, -0.8f, -1.2f}),
Maths.divideTowardsFloor(xa, -2.5f), true, ABSERRD, ABSERRD);
}
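/**
 * Test floorDivide which returns the floor of the quotient for both integer and
 * floating-point datasets
 */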
@Test
public void testFloorDivide() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-2, -2, -1, -1, 0, 0, 1, 1}),
Maths.floorDivide(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {2, 1, 1, 0, 0, -1, -1, -2}),
Maths.floorDivide(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {-2, -2, -1, -1, 0, 0, 0, 1}),
Maths.floorDivide(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {1, 1, 0, 0, 0, -1, -1, -2}),
Maths.floorDivide(xa, -2.5f), true, ABSERRD, ABSERRD);
}
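/**
 * Test floorRemainder which returns a remainder carrying the sign of the divisor,
 * consistent with floorDivide
 */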
@Test
public void testFloorRemainder() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 1, 0, 1, 0, 1, 0, 1}),
Maths.floorRemainder(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, -1, 0, -1, 0, -1, 0, -1}),
Maths.floorRemainder(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {1, 2, 0.5, 1.5, 0, 1, 2, 0.5}),
Maths.floorRemainder(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {-1.5f, -0.5f, -2, -1, 0, -1.5f, -0.5f, -2}),
Maths.floorRemainder(xa, -2.5f), true, ABSERRD, ABSERRD);
}
}
| org.eclipse.dawnsci.analysis.dataset.test/src/org/eclipse/dawnsci/analysis/dataset/MathsTest.java | /*-
* Copyright (c) 2012 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.dataset;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.math3.complex.Complex;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.dataset.impl.ByteDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.ComplexDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundShortDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Dataset;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetFactory;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetUtils;
import org.eclipse.dawnsci.analysis.dataset.impl.DoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.FloatDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.IndexIterator;
import org.eclipse.dawnsci.analysis.dataset.impl.IntegerDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Maths;
import org.eclipse.dawnsci.analysis.dataset.impl.Random;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
public class MathsTest {
private final static int SSTEP = 15;
private final static int SITER = 3;
private final static double ABSERRD = 1e-8;
private final static double ABSERRF = 1e-5;
private final static double RELERR = 1e-5;
private final static int ISIZEA = 2;
private final static int ISIZEB = 3;
private final static int MAXISIZE = Math.max(ISIZEA, ISIZEB);
@Before
public void setUpClass() {
classes = new LinkedHashMap<String, Integer>();
// classes.put("Boolean", Dataset.BOOL);
classes.put("Byte", Dataset.INT8);
classes.put("Short", Dataset.INT16);
classes.put("Integer", Dataset.INT32);
classes.put("Long", Dataset.INT64);
classes.put("Float", Dataset.FLOAT32);
classes.put("Double", Dataset.FLOAT64);
classes.put("ComplexF", Dataset.COMPLEX64);
classes.put("ComplexD", Dataset.COMPLEX128);
classes.put("ArrayB", Dataset.ARRAYINT8);
classes.put("ArrayS", Dataset.ARRAYINT16);
classes.put("ArrayI", Dataset.ARRAYINT32);
classes.put("ArrayL", Dataset.ARRAYINT64);
classes.put("ArrayF", Dataset.ARRAYFLOAT32);
classes.put("ArrayD", Dataset.ARRAYFLOAT64);
TestUtils.verboseOutput = true;
}
@After
public void closeDown() {
TestUtils.verboseOutput = false;
}
private Map<String, Integer> classes;
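/**
 * Compare the actual dataset c against the expected dataset d: dtype, size, elements per
 * item and shape must match, and every element must agree within an absolute or relative
 * tolerance; the operands a and b are printed to help diagnose any mismatch
 */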
private void checkDatasets(Object a, Object b, Dataset c, Dataset d) {
Assert.assertNotNull(c);
Assert.assertNotNull(d);
Assert.assertEquals("Dtype does not match", c.getDtype(), d.getDtype());
Assert.assertEquals("Size does not match", c.getSize(), d.getSize());
Assert.assertEquals("ISize does not match", c.getElementsPerItem(), d.getElementsPerItem());
Assert.assertArrayEquals("Shape does not match", c.getShape(), d.getShape());
final IndexIterator ci = c.getIterator(true);
final IndexIterator di = d.getIterator();
final int is = c.getElementsPerItem();
final double abserr = (c.getDtype() == Dataset.FLOAT32 ||
c.getDtype() == Dataset.COMPLEX64 ||
c.getDtype() == Dataset.ARRAYFLOAT32) ? ABSERRF : ABSERRD;
if (is == 1) {
while (ci.hasNext() && di.hasNext()) {
double av = c.getElementDoubleAbs(ci.index);
double bv = d.getElementDoubleAbs(di.index);
double tol = Math.max(abserr, Math.abs(av*RELERR));
if (Math.abs(av - bv) > tol) {
if (a != null) {
if (a instanceof Dataset)
System.err.printf("A was %s ", ((Dataset) a).getString(ci.getPos()));
else
System.err.printf("A was %s ", a);
}
if (b != null) {
if (b instanceof Dataset)
System.err.printf("B was %s ", ((Dataset) b).getString(ci.getPos()));
else
System.err.printf("B was %s ", b);
}
System.err.printf("at %s\n", Arrays.toString(ci.getPos()));
}
Assert.assertEquals("Value does not match at " + Arrays.toString(ci.getPos()) + ", with tol " + tol + ": ",
av, bv, tol);
}
} else {
while (ci.hasNext() && di.hasNext()) {
for (int j = 0; j < is; j++) {
double av = c.getElementDoubleAbs(ci.index + j);
double bv = d.getElementDoubleAbs(di.index + j);
double tol = Math.max(abserr, Math.abs(av*RELERR));
if (Math.abs(av - bv) > tol) {
if (a != null) {
if (a instanceof Dataset)
System.err.printf("A was %s ", ((Dataset) a).getString(ci.getPos()));
else
System.err.printf("A was %s ", a);
}
if (b != null) {
if (b instanceof Dataset)
System.err.printf("B was %s ", ((Dataset) b).getString(ci.getPos()));
else
System.err.printf("B was %s ", b);
}
System.err.printf("at %s\n", Arrays.toString(ci.getPos()));
}
Assert.assertEquals("Value does not match at " + Arrays.toString(ci.getPos()) + "; " + j +
", with tol " + tol + ": ", av, bv, tol);
}
}
}
}
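/**
 * Test Maths.add for every pair of dataset classes, and for a scalar constant, against
 * an explicit element-by-element reference sum, reporting per-element timings
 */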
@Test
public void testAddition() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s to %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.add(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).add((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
+ ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da + b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) + db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
+ b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant to %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.add(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).add(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() + dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) + dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
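/**
 * Test Maths.subtract for every pair of dataset classes and for both orderings with a
 * scalar constant, against an element-by-element reference calculation
 */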
@Test
public void testSubtraction() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s to %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).subtract((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
- ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da - b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) - db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
- b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by sub for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant from %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).subtract(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() - dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) - dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by add for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s from constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.subtract(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.subtract((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv - ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv - a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by sub for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
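/**
 * Test Maths.multiply for every pair of dataset classes and with a scalar constant,
 * against an element-by-element reference calculation
 */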
@Test
public void testMultiplication() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.multiply(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply(b.getElementDoubleAbs(itb.index)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).multiply((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
* ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double da = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = da * b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double db = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) * db;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k)
* b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by mul for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant with %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.multiply(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).multiply(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() * dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) * dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by mul for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
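/**
 * Test Maths.divide for every pair of dataset classes and for both orderings with a
 * scalar constant, against an element-by-element reference calculation (currently ignored)
 */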
@Ignore
@Test
public void testDivision() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.divide(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).divide((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
Complex z = (Complex) a.getObjectAbs(ita.index);
double br = b.getElementDoubleAbs(itb.index);
Complex zr = z.divide(br);
if (br == 0) { // CM's implementation is different to NumPy's
zr = new Complex(z.getReal() != 0 ? z.getReal() / br : zr.getReal(),
z.getImaginary() != 0 ? z.getImaginary() / br : zr.getImaginary());
}
d.setObjectAbs(j, zr);
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).divide((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
if (d.hasFloatingPointElements()) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
/ ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
while (ita.hasNext() && itb.hasNext()) {
double bv = ((Number) b.getObjectAbs(itb.index)).doubleValue();
d.setObjectAbs(j++, bv == 0 ? 0 :((Number) a.getObjectAbs(ita.index)).doubleValue()
/ bv);
}
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
if (d.hasFloatingPointElements()) {
for (int k = 0; k < ISIZEB; k++) {
answer[k] = xa / b.getElementDoubleAbs(itb.index + k);
}
} else {
for (int k = 0; k < ISIZEB; k++) {
final double v = xa / b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
if (d.hasFloatingPointElements()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / xb;
}
} else {
if (xb == 0) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = 0;
}
} else {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / xb;
}
}
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
if (d.hasFloatingPointElements()) {
double v;
for (int k = 0; k < is; k++) {
v = a.getElementDoubleAbs(ita.index + k)
/ b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
} else {
double v;
for (int k = 0; k < is; k++) {
v = a.getElementDoubleAbs(ita.index + k)
/ b.getElementDoubleAbs(itb.index + k);
answer[k] = Double.isInfinite(v) || Double.isNaN(v) ? 0 : v;
}
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.divide(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).divide(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() / dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) / dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.divide(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: %s\n", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.divide((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv / ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv / a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by div for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
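/**
 * Test Maths.remainder for every pair of dataset classes and for both orderings with a
 * scalar constant, against an element-by-element reference calculation
 */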
@Test
public void testRemainder() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: ", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue()
% ((Number) b.getObjectAbs(itb.index)).doubleValue());
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = xa % b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % xb;
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % b.getElementDoubleAbs(itb.index + k);
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: ", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, ((Number) a.getObjectAbs(ita.index)).doubleValue() % dv);
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = a.getElementDoubleAbs(ita.index + k) % dv;
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.remainder(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
} catch (UnsupportedOperationException ue) {
TestUtils.verbosePrintf("Could not perform this operation: ", ue.getMessage());
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, dv % ((Number) a.getObjectAbs(ita.index)).doubleValue());
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = dv % a.getElementDoubleAbs(ita.index + k);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by rem for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
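/**
 * Test Maths.power for every pair of dataset classes and with a scalar constant as
 * either base or exponent, against an element-by-element reference calculation
 */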
@Test
public void testPower() {
Dataset a, b, c = null, d = null;
Complex zv = new Complex(-3.5, 0);
final double dv = zv.getReal();
long start;
int n;
int eCount = 0;
for (String dn : classes.keySet()) {
final int dtype = classes.get(dn);
Random.seed(12735L);
for (String en : classes.keySet()) {
final int etype = classes.get(en);
TestUtils.verbosePrintf("%s by %s, ", dn, en);
n = 32;
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n).imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
if (etype < Dataset.ARRAYINT8) {
b = Random.randn(n).imultiply(100);
b = b.cast(etype);
} else {
Dataset[] ab = new Dataset[ISIZEB];
for (int j = 0; j < ISIZEB; j++) {
ab[j] = Random.randn(n).imultiply(100);
}
b = DatasetUtils.cast(ab, etype);
}
start = -System.nanoTime();
try {
c = Maths.power(a, b);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start) / c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
int j = 0;
if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow((Complex) b
.getObjectAbs(itb.index)));
j += is;
}
} else if ((dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& !(etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow(new Complex(b.getElementDoubleAbs(itb.index), 0)));
j += is;
}
} else if (!(dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128)
&& (etype == Dataset.COMPLEX64 || etype == Dataset.COMPLEX128)) {
final int is = d.getElementsPerItem();
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j, new Complex(a.getElementDoubleAbs(ita.index), 0).pow((Complex) b.getObjectAbs(itb.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8 && etype < Dataset.ARRAYINT8) {
while (ita.hasNext() && itb.hasNext()) {
d.setObjectAbs(j++, Math.pow(a.getElementDoubleAbs(ita.index),
b.getElementDoubleAbs(itb.index)));
}
} else {
final double[] answer = new double[MAXISIZE];
final int is = d.getElementsPerItem();
if (a.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xa = a.getElementDoubleAbs(ita.index);
for (int k = 0; k < ISIZEB; k++) {
answer[k] = Math.pow(xa, b.getElementDoubleAbs(itb.index + k));
}
d.setObjectAbs(j, answer);
j += is;
}
} else if (b.getElementsPerItem() < is) {
while (ita.hasNext() && itb.hasNext()) {
final double xb = b.getElementDoubleAbs(itb.index);
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k), xb);
}
d.setObjectAbs(j, answer);
j += is;
}
} else {
while (ita.hasNext() && itb.hasNext()) {
for (int k = 0; k < is; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k),
b.getElementDoubleAbs(itb.index + k));
}
d.setObjectAbs(j, answer);
j += is;
}
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start) / d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.
* (otime - ntime) / otime);
checkDatasets(a, b, c, d);
n *= SSTEP;
}
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("%s by constant, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.power(a, dv);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, ((Complex) a.getObjectAbs(ita.index)).pow(zv));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, Math.pow(((Number) a.getObjectAbs(ita.index)).doubleValue(), dv));
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(a.getElementDoubleAbs(ita.index + k), dv);
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(a, dv, c, d);
n *= SSTEP;
}
Random.seed(12735L);
n = 32;
TestUtils.verbosePrintf("constant by %s, ", dn);
for (int i = 0; i < SITER; i++) {
if (dtype < Dataset.ARRAYINT8) {
a = Random.randn(n);
a.imultiply(100);
a = a.cast(dtype);
} else {
Dataset[] aa = new Dataset[ISIZEA];
for (int j = 0; j < ISIZEA; j++) {
aa[j] = Random.randn(n).imultiply(100);
}
a = DatasetUtils.cast(aa, dtype);
}
start = -System.nanoTime();
try {
c = Maths.power(dv, a);
} catch (IllegalArgumentException e) {
TestUtils.verbosePrintf("Could not perform this operation: ", e.getMessage());
eCount++;
continue;
}
start += System.nanoTime();
double ntime = ((double) start)/c.getSize();
d = DatasetFactory.zeros(c);
start = -System.nanoTime();
IndexIterator ita = a.getIterator();
int j = 0;
if (dtype == Dataset.COMPLEX64 || dtype == Dataset.COMPLEX128) {
final int is = d.getElementsPerItem();
while (ita.hasNext()) {
d.setObjectAbs(j, zv.pow((Complex) a.getObjectAbs(ita.index)));
j += is;
}
} else {
if (dtype < Dataset.ARRAYINT8) {
while (ita.hasNext()) {
d.setObjectAbs(j++, Math.pow(dv, ((Number) a.getObjectAbs(ita.index)).doubleValue()));
}
} else {
final double[] answer = new double[ISIZEA];
while (ita.hasNext()) {
for (int k = 0; k < ISIZEA; k++) {
answer[k] = Math.pow(dv, a.getElementDoubleAbs(ita.index + k));
}
d.setObjectAbs(j, answer);
j += ISIZEA;
}
}
}
if (d == null)
break;
start += System.nanoTime();
double otime = ((double) start)/d.getSize();
TestUtils.verbosePrintf("Time taken by pow for %s: %s; %s (%.1f%%)\n", n, otime, ntime, 100.*(otime - ntime)/otime);
checkDatasets(dv, a, c, d);
n *= SSTEP;
}
}
if (eCount > 0) {
TestUtils.verbosePrintf("Number of exceptions caught: %d\n", eCount);
}
}
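	/** Check Maths.difference (first and second order, last axis) against hand-computed results for several dataset types, comparing sliced views with materialized slices. */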
@Test
public void testDifference() {
int[] data = {0,1,3,9,5,10};
Dataset a = new IntegerDataset(data, null);
Dataset d = Maths.difference(a, 1, -1);
int[] tdata;
tdata = new int[] {1, 2, 6, -4, 5};
Dataset ta = new IntegerDataset(tdata, null);
checkDatasets(null, null, d, ta);
Slice[] slices = new Slice[] {new Slice(3)};
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
slices = new Slice[] {new Slice(-2, null, -1)};
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new ComplexDoubleDataset(new double[] {0, 1, 2, 3, 4, 5});
d = Maths.difference(a, 1, -1);
ta = new ComplexDoubleDataset(new double[] {2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5});
d = Maths.difference(a, 1, -1);
ta = new CompoundDoubleDataset(2, new double[] {2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 1, -1);
ta = Maths.difference(a.getSlice(slices), 1, -1);
checkDatasets(null, null, d, ta);
a = new ByteDataset(new byte[] {0, 1, 2, 4, 7, 11});
d = Maths.difference(a, 2, -1);
ta = new ByteDataset(new byte[] {0, 1, 1, 1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
a = new CompoundShortDataset(2, new short[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.difference(a, 2, -1);
ta = new CompoundShortDataset(2, new short[] {0, 0, 1, -1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.difference(a, 2, -1);
ta = new CompoundDoubleDataset(2, new double[] {0, 0, 1, -1});
checkDatasets(null, null, d, ta);
d = Maths.difference(a.getSliceView(slices), 2, -1);
ta = Maths.difference(a.getSlice(slices), 2, -1);
checkDatasets(null, null, d, ta);
}
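	/** Check Maths.gradient against hand-computed results, with and without coordinate datasets, for several dataset types and sliced views. */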
@Test
public void testGradient() {
double[] data = {1, 2, 4, 7, 11, 16};
double[] tdata;
Dataset a = new DoubleDataset(data, null);
Dataset d = Maths.gradient(a).get(0);
tdata = new double[] {1., 1.5, 2.5, 3.5, 4.5, 5.};
Dataset ta = new DoubleDataset(tdata, null);
checkDatasets(null, null, d, ta);
Slice[] slices = new Slice[] {new Slice(3)};
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
Dataset b = DatasetFactory.createRange(a.getShape()[0], a.getDtype());
b.imultiply(2);
tdata = new double[] {0.5 , 0.75, 1.25, 1.75, 2.25, 2.5};
ta = new DoubleDataset(tdata, null);
d = Maths.gradient(a, b).get(0);
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices), b.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices), b.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
data = new double[] {1, 2, 6, 3, 4, 5};
a = new DoubleDataset(data, 2, 3);
List<? extends Dataset> l = Maths.gradient(a);
tdata = new double[] { 2., 2., -1., 2., 2., -1.};
ta = new DoubleDataset(tdata, 2, 3);
checkDatasets(null, null, l.get(0), ta);
tdata = new double[] { 1., 2.5, 4., 1., 1., 1.};
ta = new DoubleDataset(tdata, 2, 3);
checkDatasets(null, null, l.get(1), ta);
b = DatasetFactory.createRange(a.getShape()[0], a.getDtype());
b.imultiply(2);
Dataset c = DatasetFactory.createRange(a.getShape()[1], a.getDtype());
c.imultiply(-1.5);
l = Maths.gradient(a, b, c);
tdata = new double[] { 2., 2., -1., 2., 2., -1.};
ta = new DoubleDataset(tdata, 2, 3);
ta.idivide(2);
checkDatasets(null, null, l.get(0), ta);
tdata = new double[] { 1., 2.5, 4., 1., 1., 1.};
ta = new DoubleDataset(tdata, 2, 3);
ta.idivide(-1.5);
checkDatasets(null, null, l.get(1), ta);
a = new ByteDataset(new byte[] {0, 1, 2, 4, 7, 11});
d = Maths.gradient(a).get(0);
ta = new ByteDataset(new byte[] {1, 1, 1, 2, 3, 4});
checkDatasets(null, null, d, ta);
slices = new Slice[] {new Slice(-2, null, -1)};
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new ComplexDoubleDataset(new double[] {0, 1, 2, 3, 4, 5});
d = Maths.gradient(a).get(0);
ta = new ComplexDoubleDataset(new double[] {2, 2, 2, 2, 2, 2});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new CompoundShortDataset(2, new short[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.gradient(a).get(0);
ta = new CompoundShortDataset(2, new short[] {2, 2, 2, 2, 2, 1, 3, 1});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
a = new CompoundDoubleDataset(2, new double[] {0, 1, 2, 3, 4, 5, 7, 6});
d = Maths.gradient(a).get(0);
ta = new CompoundDoubleDataset(2, new double[] {2, 2, 2, 2, 2.5, 1.5, 3, 1});
checkDatasets(null, null, d, ta);
d = Maths.gradient(a.getSliceView(slices)).get(0);
ta = Maths.gradient(a.getSlice(slices)).get(0);
checkDatasets(null, null, d, ta);
}
/**
* Test rounding
*/
@Test
public void testRounding() {
DoubleDataset t;
DoubleDataset x;
double tol = 1e-6;
double[] val = { -1.7, -1.5, -1.2, 0.3, 1.4, 1.5, 1.6 };
t = new DoubleDataset(val);
double[] resFloor = { -2, -2, -2, 0, 1, 1, 1 };
x = (DoubleDataset) Maths.floor(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resFloor[i], x.get(i), tol);
}
double[] resCeil = { -1, -1, -1, 1, 2, 2, 2 };
x = (DoubleDataset) Maths.ceil(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resCeil[i], x.get(i), tol);
}
double[] resRint= { -2, -2, -1, 0, 1, 2, 2 };
x = (DoubleDataset) Maths.rint(t);
for (int i = 0, imax = t.getSize(); i < imax; i++) {
assertEquals(resRint[i], x.get(i), tol);
}
}
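	/** Check 1D linear interpolation of dataset a at position x against a value computed directly here (zero outside the data range). */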
private void checkInterpolate(Dataset a, double x) {
int s = a.getShapeRef()[0];
// double v = Maths.interpolate(a, x);
double v = Maths.interpolate(a, new double[] {x});
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0) {
f2 = a.getDouble(0);
} else if (x >= s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
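	/** Same check as checkInterpolate(a, x), but using the overload that takes an explicit x-coordinate dataset (0..s-1) with default (null) left/right values. */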
private void checkInterpolate2(Dataset a, double x) {
int s = a.getShapeRef()[0];
Dataset dv = Maths.interpolate(DatasetFactory.createRange(s, Dataset.INT32), a, DatasetFactory.createFromObject(x), null, null);
double v = dv.getElementDoubleAbs(0);
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0) {
f2 = a.getDouble(0);
} else if (x >= s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
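	/** Same check as checkInterpolate2, but with explicit left/right fill values of 0. */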
private void checkInterpolate3(Dataset a, double x) {
int s = a.getShapeRef()[0];
Dataset dv = Maths.interpolate(DatasetFactory.createRange(s, Dataset.INT32), a, DatasetFactory.createFromObject(x), 0, 0);
double v = dv.getElementDoubleAbs(0);
if (x <= -1 || x >= s) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
double f1 = 0;
double f2 = 0;
double t = x - i;
if (x < 0 || x > s - 1) {
} else if (x == s - 1) {
f1 = a.getDouble(i);
} else {
f1 = a.getDouble(i);
f2 = a.getDouble(i + 1);
}
Assert.assertEquals((1 - t) * f1 + t * f2, v, 1e-15);
}
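	/** Check element-wise 1D linear interpolation of a compound dataset at position x. */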
private void checkInterpolateArray(CompoundDataset a, double x) {
int s = a.getShapeRef()[0];
int is = a.getElementsPerItem();
double[] v = new double[is];
Maths.interpolate(v, a, x);
int i = (int) Math.floor(x);
double[] e = new double[is];
double[] f1 = new double[is];
double[] f2 = new double[is];
if (x <= -1 || x >= s) {
} else if (x < 0) {
a.getDoubleArray(f2, 0);
} else if (x >= s - 1) {
a.getDoubleArray(f1, s - 1);
} else {
a.getDoubleArray(f1, i);
a.getDoubleArray(f2, i + 1);
}
double t = x - i;
for (int j = 0; j < is; j++)
e[j] = (1 - t) * f1[j] + t * f2[j];
Assert.assertArrayEquals(e, v, 1e-15);
}
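	/** Check 2D bilinear interpolation of dataset a at (x, y) against a value computed directly here; also checks the overload that takes an additional dataset of ones. */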
private void checkInterpolate(Dataset a, double x, double y) {
int s0 = a.getShapeRef()[0];
int s1 = a.getShapeRef()[1];
// double v = Maths.interpolate(a, x, y);
double v = Maths.interpolate(a, new double[] {x, y});
if (x <= -1 || x >= s0 || y <= -1 || y >= s1) {
Assert.assertEquals(0, v, 1e-15);
return;
}
int i = (int) Math.floor(x);
int j = (int) Math.floor(y);
double t1 = x - i;
double t2 = y - j;
double f1 = 0, f2 = 0, f3 = 0, f4 = 0;
if (y < 0) {
if (x < 0) {
f4 = a.getDouble(0, 0);
} else if (x >= s0 - 1) {
f3 = a.getDouble(s0 - 1, 0);
} else {
f3 = a.getDouble(i, 0);
f4 = a.getDouble(i + 1, 0);
}
} else if (y >= s1 - 1) {
if (x < 0) {
f2 = a.getDouble(0, s1 - 1);
} else if (x >= s0 - 1) {
f1 = a.getDouble(s0 - 1, s1 - 1);
} else {
f1 = a.getDouble(i, s1 - 1);
f2 = a.getDouble(i + 1, s1 -1);
}
} else {
if (x < 0) {
f2 = a.getDouble(0, j);
f4 = a.getDouble(0, j + 1);
} else if (x >= s0 - 1) {
f1 = a.getDouble(s0 - 1, j);
f3 = a.getDouble(s0 - 1, j + 1);
} else {
f1 = a.getDouble(i, j);
f2 = a.getDouble(i + 1, j);
f3 = a.getDouble(i, j + 1);
f4 = a.getDouble(i + 1, j + 1);
}
}
double r = (1 - t1) * (1 - t2) * f1 + t1 * (1 - t2) * f2 + (1 - t1) * t2 * f3 + t1 * t2 * f4;
Assert.assertEquals(r, v, 1e-15);
v = Maths.interpolate(a, DatasetFactory.ones(a), x, y);
Assert.assertEquals(r, v, 1e-15);
}
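	/** Check element-wise 2D bilinear interpolation of a compound dataset at (x, y). */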
private void checkInterpolateArray(CompoundDataset a, double x, double y) {
int s0 = a.getShapeRef()[0];
int s1 = a.getShapeRef()[1];
int is = a.getElementsPerItem();
double[] v = new double[is];
Maths.interpolate(v, a, x, y);
if (x <= -1 || x >= s0 || y <= -1 || y >= s1) {
Assert.assertArrayEquals(new double[is], v, 1e-15);
return;
}
double[] f1 = new double[is];
double[] f2 = new double[is];
double[] f3 = new double[is];
double[] f4 = new double[is];
int i = (int) Math.floor(x);
int j = (int) Math.floor(y);
double t1 = x - i;
double t2 = y - j;
if (y < 0) {
if (x < 0) {
a.getDoubleArray(f4, 0, 0);
} else if (x >= s0 - 1) {
a.getDoubleArray(f3, s0 - 1, 0);
} else {
a.getDoubleArray(f3, i, 0);
a.getDoubleArray(f4, i + 1, 0);
}
} else if (y >= s1 - 1) {
if (x < 0) {
a.getDoubleArray(f2, 0, s1 - 1);
} else if (x >= s0 - 1) {
a.getDoubleArray(f1, s0 - 1, s1 - 1);
} else {
a.getDoubleArray(f1, i, s1 - 1);
a.getDoubleArray(f2, i + 1, s1 -1);
}
} else {
if (x < 0) {
a.getDoubleArray(f2, 0, j);
a.getDoubleArray(f4, 0, j + 1);
} else if (x >= s0 - 1) {
a.getDoubleArray(f1, s0 - 1, j);
a.getDoubleArray(f3, s0 - 1, j + 1);
} else {
a.getDoubleArray(f1, i, j);
a.getDoubleArray(f2, i + 1, j);
a.getDoubleArray(f3, i, j + 1);
a.getDoubleArray(f4, i + 1, j + 1);
}
}
for (j = 0; j < is; j++) {
f1[j] = (1 - t1) * (1 - t2) * f1[j] + t1 * (1 - t2) * f2[j] + (1 - t1) * t2 * f3[j] + t1 * t2 * f4[j];
}
Assert.assertArrayEquals(f1, v, 1e-15);
}
@Test
public void testLinearInterpolation() {
Dataset xa = DatasetFactory.createRange(60, Dataset.INT32);
xa.iadd(1);
double[] xc = {-1.25, -1, -0.25, 0, 0.25, 58.25, 59, 59.25, 60, 60.25};
for (double x : xc) {
// System.out.printf("%g\n", x);
checkInterpolate(xa, x);
checkInterpolate2(xa, x);
checkInterpolate3(xa, x);
}
Dataset xb = DatasetFactory.createRange(120, Dataset.INT32);
xb.setShape(60, 2);
xb.ifloorDivide(2);
xb = DatasetUtils.createCompoundDatasetFromLastAxis(xb, true);
for (double x : xc) {
checkInterpolate(xb, x);
checkInterpolate2(xb, x);
checkInterpolate3(xb, x);
}
AbstractDatasetTest.checkDatasets(Maths.interpolate(DatasetFactory.createFromObject(new double[] {1, 2, 3}), DatasetFactory.createFromObject(new double[] {3, 2, 0}), DatasetFactory.createFromObject(new double[] {0, 1, 1.5, 2.72, 3.14}), 3, 0), DatasetFactory.createFromObject(new double[] {3. , 3. , 2.5 , 0.56, 0.}));
CompoundDataset cxb = (CompoundDataset) xb;
for (double x : xc) {
checkInterpolateArray(cxb, x);
}
xa.setShape(6, 10);
xc = new double[] {-1.25, -1, -0.25, 0, 0.25, 5.25, 6, 6.25, 7};
double[] yc = {-1.25, -1, -0.25, 0, 0.25, 8.25, 9, 9.25, 10, 10.25};
for (double x : xc) {
for (double y : yc) {
// System.out.printf("%g %g\n", x, y);
checkInterpolate(xa, x, y);
}
}
cxb.setShape(6, 10);
// xc = new double[] {-0.25, 0, 0.25, 5.25, 6, 6.25, 7};
// yc = new double[] {9.25, 10, 10.25};
for (double x : xc) {
for (double y : yc) {
// System.out.printf("%g %g\n", x, y);
checkInterpolateArray(cxb, x, y);
}
}
}
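	/** Check bitwise and/or/xor, inversion and left/right/unsigned-right shifts on byte datasets. */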
@Test
public void testBitwise() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
Dataset xb = DatasetFactory.createRange(8, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 1, 2, 3, 0, 1, 2, 3}),
Maths.bitwiseAnd(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -3, -2, -1, 4, 5, 6, 7}),
Maths.bitwiseOr(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -4, -4, -4, 4, 4, 4, 4}),
Maths.bitwiseXor(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {3, 2, 1, 0, -1, -2, -3, -4}),
Maths.bitwiseInvert(xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-1, -2, -3, -4, -5, -6, -7, -8}),
Maths.bitwiseInvert(xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -6, -8, -8, 0, 32, -128, -128}),
Maths.leftShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 10, 24, 56}),
Maths.leftShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 0, 2, 8, 24}),
Maths.leftShift(xa, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, -2, -1, -1, 0, 0, 0, 0}),
Maths.rightShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 2, 1, 0}),
Maths.rightShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-1, -1, -1, -1, 0, 0, 0, 0}),
Maths.rightShift(xa, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-4, 126, 63, 31, 0, 0, 0, 0}),
Maths.unsignedRightShift(xa, xb), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 4, 2, 1, 0}),
Maths.unsignedRightShift(xb, xa), ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 0, 0, 0, 0, 0, 0, 0}),
Maths.unsignedRightShift(xa, xa), ABSERRD, ABSERRD);
}
@Test
public void testDivideTowardsFloor() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-2, -2, -1, -1, 0, 0, 1, 1}),
Maths.divideTowardsFloor(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {2, 1, 1, 0, 0, -1, -1, -2}),
Maths.divideTowardsFloor(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {-1.6, -1.2, -0.8, -0.4, 0, 0.4, 0.8, 1.2}),
Maths.divideTowardsFloor(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {1.6f, 1.2f, 0.8f, 0.4f, 0 , -0.4f, -0.8f, -1.2f}),
Maths.divideTowardsFloor(xa, -2.5f), true, ABSERRD, ABSERRD);
}
@Test
public void testFloorDivide() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {-2, -2, -1, -1, 0, 0, 1, 1}),
Maths.floorDivide(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {2, 1, 1, 0, 0, -1, -1, -2}),
Maths.floorDivide(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {-2, -2, -1, -1, 0, 0, 0, 1}),
Maths.floorDivide(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {1, 1, 0, 0, 0, -1, -1, -2}),
Maths.floorDivide(xa, -2.5f), true, ABSERRD, ABSERRD);
}
@Test
public void testFloorRemainder() {
Dataset xa = DatasetFactory.createRange(-4, 4, 1, Dataset.INT8);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, 1, 0, 1, 0, 1, 0, 1}),
Maths.floorRemainder(xa, 2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new ByteDataset(new byte[] {0, -1, 0, -1, 0, -1, 0, -1}),
Maths.floorRemainder(xa, -2), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new DoubleDataset(new double[] {1, 2, 0.5, 1.5, 0, 1, 2, 0.5}),
Maths.floorRemainder(xa, 2.5), true, ABSERRD, ABSERRD);
TestUtils.assertDatasetEquals(new FloatDataset(new float[] {-1.5f, -0.5f, -2, -1, 0, -1.5f, -0.5f, -2}),
Maths.floorRemainder(xa, -2.5f), true, ABSERRD, ABSERRD);
}
}
| Ignore remainder | org.eclipse.dawnsci.analysis.dataset.test/src/org/eclipse/dawnsci/analysis/dataset/MathsTest.java | Ignore remainder | <ide><path>rg.eclipse.dawnsci.analysis.dataset.test/src/org/eclipse/dawnsci/analysis/dataset/MathsTest.java
<ide> }
<ide> }
<ide>
<add> @Ignore
<ide> @Test
<ide> public void testRemainder() {
<ide> Dataset a, b, c = null, d = null; |
|
Java | lgpl-2.1 | 66e525c9072eebbef7a874f2d879f21918ac5ae3 | 0 | johnscancella/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs | /*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2003-2005 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.ClassFormatException;
import org.apache.bcel.classfile.ClassParser;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.util.ClassPath;
import edu.umd.cs.findbugs.ba.AnalysisContext;
import edu.umd.cs.findbugs.ba.AnalysisException;
import edu.umd.cs.findbugs.ba.AnalysisFeatures;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.ba.ClassObserver;
import edu.umd.cs.findbugs.ba.URLClassPath;
import edu.umd.cs.findbugs.config.AnalysisFeatureSetting;
import edu.umd.cs.findbugs.config.CommandLine;
import edu.umd.cs.findbugs.config.UserPreferences;
import edu.umd.cs.findbugs.filter.Filter;
import edu.umd.cs.findbugs.filter.FilterException;
import edu.umd.cs.findbugs.filter.Matcher;
import edu.umd.cs.findbugs.plan.AnalysisPass;
import edu.umd.cs.findbugs.plan.ExecutionPlan;
import edu.umd.cs.findbugs.plan.OrderingConstraintException;
import edu.umd.cs.findbugs.visitclass.Constants2;
/**
* An instance of this class is used to apply the selected set of
* analyses on some collection of Java classes. It also implements the
 * command line interface.
*
* @author Bill Pugh
* @author David Hovemeyer
*/
public class FindBugs implements Constants2, ExitCodes {
/* ----------------------------------------------------------------------
* Helper classes
* ---------------------------------------------------------------------- */
/**
* Delegating InputStream wrapper that never closes the
* underlying input stream.
*/
private static class NoCloseInputStream extends DataInputStream {
/**
* Constructor.
* @param in the real InputStream
*/
public NoCloseInputStream(InputStream in) {
super(in);
}
public void close() {
}
}
/**
* Work list item specifying a file/directory/URL containing
* class files to analyze.
*/
private static class ArchiveWorkListItem {
private String fileName;
private boolean explicit;
/**
* Constructor.
*
* @param fileName file/directory/URL
		 * @param explicit true if this source of classes appeared explicitly
		 *                  in the project file, false if it was found indirectly
		 *                  (e.g., a nested jar file in a .war file)
*/
public ArchiveWorkListItem(String fileName, boolean explicit) {
this.fileName = fileName;
this.explicit = explicit;
}
/**
* Get the file/directory/URL.
*/
public String getFileName() {
return fileName;
}
/**
* Return whether this class source appeared explicitly in
* the project file.
*/
public boolean isExplicit() {
return explicit;
}
}
/**
* Interface for an object representing a source of class files to analyze.
*/
private interface ClassProducer {
/**
* Get the next class to analyze.
*
		 * @return the class, or null if there are no more classes for this ClassProducer
* @throws IOException if an IOException occurs
* @throws InterruptedException if the thread is interrupted
*/
public JavaClass getNextClass() throws IOException, InterruptedException;
/**
* Did this class producer scan any Java source files?
*/
public boolean containsSourceFiles();
/**
* Close any internal files or streams.
*/
public void close();
}
/**
* ClassProducer for single class files.
*/
private class SingleClassProducer implements ClassProducer {
private URL url;
/**
* Constructor.
*
* @param url the single class file to be analyzed
*/
public SingleClassProducer(URL url) {
this.url = url;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
if (url == null)
return null;
if (Thread.interrupted())
throw new InterruptedException();
URL urlToParse = url;
url = null; // don't return it next time
// ClassScreener may veto this class.
if (!classScreener.matches(urlToParse.toString()))
return null;
try {
return parseClass(urlToParse);
} catch (ClassFormatException e) {
throw new ClassFormatException("Invalid class file format for " +
url.toString() + ": " + e.getMessage());
}
}
public boolean containsSourceFiles() {
return false;
}
public void close() {
// Nothing to do here
}
}
/**
* ClassProducer for zip/jar archives.
*/
private class ZipClassProducer implements ClassProducer {
private URL url;
private LinkedList<ArchiveWorkListItem> archiveWorkList;
private List<String> additionalAuxClasspathEntryList;
private ZipInputStream zipInputStream;
private boolean containsSourceFiles;
public ZipClassProducer(URL url, LinkedList<ArchiveWorkListItem> archiveWorkList,
List<String> additionalAuxClasspathEntryList)
throws IOException {
this.url = url;
this.archiveWorkList = archiveWorkList;
this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList;
if (DEBUG) System.out.println("Opening jar/zip input stream for " + url.toString());
this.zipInputStream = new ZipInputStream(url.openStream());
this.containsSourceFiles = false;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
for (;;) {
if (Thread.interrupted())
throw new InterruptedException();
ZipEntry zipEntry = zipInputStream.getNextEntry();
if (zipEntry == null)
return null;
try {
String entryName = zipEntry.getName();
// ClassScreener may veto this class.
if (!classScreener.matches(entryName)) {
// Add archive URL to aux classpath
if (!additionalAuxClasspathEntryList.contains(url.toString())) {
//System.out.println("Adding additional aux classpath entry: " + url.toString());
additionalAuxClasspathEntryList.add(url.toString());
}
continue;
}
String fileExtension = URLClassPath.getFileExtension(entryName);
if (fileExtension != null) {
if (fileExtension.equals(".class")) {
return parseClass(url.toString(), new NoCloseInputStream(zipInputStream), entryName);
} else if (archiveExtensionSet.contains(fileExtension)) {
// Add nested archive to archive work list
ArchiveWorkListItem nestedItem =
new ArchiveWorkListItem("jar:" + url.toString() + "!/" + entryName, false);
archiveWorkList.addFirst(nestedItem);
} else if (fileExtension.equals(".java")) {
containsSourceFiles = true;
}
}
} finally {
zipInputStream.closeEntry();
}
}
}
public boolean containsSourceFiles() {
return containsSourceFiles;
}
public void close() {
if (zipInputStream != null) {
try {
zipInputStream.close();
} catch (IOException ignore) {
// Ignore
}
}
}
}
/**
* ClassProducer for directories.
* The directory is scanned recursively for class files.
*/
private class DirectoryClassProducer implements ClassProducer {
private String dirName;
private List<String> additionalAuxClasspathEntryList;
private Iterator<String> rfsIter;
private boolean containsSourceFiles;
public DirectoryClassProducer(String dirName,
List<String> additionalAuxClasspathEntryList) throws InterruptedException {
this.dirName = dirName;
this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList;
FileFilter filter = new FileFilter() {
public boolean accept(File file) {
String fileName = file.getName();
if (file.isDirectory() || fileName.endsWith(".class"))
return true;
if (fileName.endsWith(".java"))
containsSourceFiles = true;
return false;
}
};
// This will throw InterruptedException if the thread is
// interrupted.
RecursiveFileSearch rfs = new RecursiveFileSearch(dirName, filter).search();
this.rfsIter = rfs.fileNameIterator();
this.containsSourceFiles = false;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
String fileName;
for (;;) {
if (!rfsIter.hasNext())
return null;
fileName = rfsIter.next();
if (classScreener.matches(fileName)) {
break;
} else {
// Add directory URL to aux classpath
String dirURL= "file:" + dirName;
if (!additionalAuxClasspathEntryList.contains(dirURL)) {
//System.out.println("Adding additional aux classpath entry: " + dirURL);
additionalAuxClasspathEntryList.add(dirURL);
}
}
}
try {
return parseClass(new URL("file:" + fileName));
} catch (ClassFormatException e) {
throw new ClassFormatException("Invalid class file format for " +
fileName + ": " + e.getMessage());
}
}
public boolean containsSourceFiles() {
return containsSourceFiles;
}
public void close() {
// Nothing to do here
}
}
/**
* A delegating bug reporter which counts reported bug instances,
* missing classes, and serious analysis errors.
*/
private static class ErrorCountingBugReporter extends DelegatingBugReporter {
private int bugCount;
private int missingClassCount;
private int errorCount;
private Set<String> missingClassSet = new HashSet<String>();
public ErrorCountingBugReporter(BugReporter realBugReporter) {
super(realBugReporter);
this.bugCount = 0;
this.missingClassCount = 0;
this.errorCount = 0;
// Add an observer to record when bugs make it through
// all priority and filter criteria, so our bug count is
// accurate.
realBugReporter.addObserver(new BugReporterObserver() {
public void reportBug(BugInstance bugInstance) {
++bugCount;
}
});
}
public int getBugCount() {
return bugCount;
}
public int getMissingClassCount() {
return missingClassCount;
}
public int getErrorCount() {
return errorCount;
}
public void logError(String message) {
++errorCount;
super.logError(message);
}
public void reportMissingClass(ClassNotFoundException ex) {
String missing = AbstractBugReporter.getMissingClassName(ex);
if (missingClassSet.add(missing))
++missingClassCount;
super.reportMissingClass(ex);
}
}
private static class CategoryFilteringBugReporter extends DelegatingBugReporter {
private Set<String> categorySet;
public CategoryFilteringBugReporter(BugReporter realBugReporter, Set<String> categorySet) {
super(realBugReporter);
this.categorySet = categorySet;
}
public void reportBug(BugInstance bugInstance) {
BugPattern bugPattern = bugInstance.getBugPattern();
String category = bugPattern.getCategory();
if (categorySet.contains(category))
getRealBugReporter().reportBug(bugInstance);
}
}
/**
* Handling callback for choose() method,
* used to implement the -chooseVisitors and -choosePlugins options.
*/
private interface Chooser {
/**
* Choose a detector, plugin, etc.
*
* @param enable whether or not the item should be enabled
* @param what the item
*/
public void choose(boolean enable, String what);
}
private static final int PRINTING_REPORTER = 0;
private static final int SORTING_REPORTER = 1;
private static final int XML_REPORTER = 2;
private static final int EMACS_REPORTER = 3;
private static final int HTML_REPORTER = 4;
private static final int XDOCS_REPORTER = 5;
public static final AnalysisFeatureSetting[] MIN_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, true),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, false),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, false),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
};
public static final AnalysisFeatureSetting[] DEFAULT_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
};
public static final AnalysisFeatureSetting[] MAX_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, true),
};
/**
* Helper class to parse the command line and create
* the FindBugs engine object.
*/
private static class TextUICommandLine extends FindBugsCommandLine {
private int bugReporterType = PRINTING_REPORTER;
private boolean relaxedReportingMode = false;
private boolean useLongBugCodes = false;
private boolean xmlWithMessages = false;
private String stylesheet = null;
private boolean quiet = false;
private ClassScreener classScreener = new ClassScreener();
private String filterFile = null;
private boolean include = false;
private boolean setExitCode = false;
private int priorityThreshold = Detector.NORMAL_PRIORITY;
private PrintStream outputStream = null;
private Set<String> bugCategorySet = null;
private UserPreferences userPreferences = UserPreferences.createDefaultUserPreferences();
private String trainingOutputDir;
private String trainingInputDir;
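		/** Register all command-line switches and options understood by the FindBugs text UI. */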
public TextUICommandLine() {
super();
addSwitch("-showPlugins", "show list of available plugins");
addSwitch("-quiet", "suppress error messages");
addSwitch("-longBugCodes", "report long bug codes");
addSwitch("-experimental", "report all warnings including experimental bug patterns");
addSwitch("-low", "report all warnings");
addSwitch("-medium", "report only medium and high priority warnings [default]");
addSwitch("-high", "report only high priority warnings");
addSwitch("-sortByClass", "sort warnings by class");
addSwitchWithOptionalExtraPart("-xml", "withMessages",
"XML output (optionally with messages)");
addSwitch("-xdocs", "xdoc XML output to use with Apache Maven");
addSwitchWithOptionalExtraPart("-html", "stylesheet",
"Generate HTML output (default stylesheet is default.xsl)");
addSwitch("-emacs", "Use emacs reporting format");
addSwitch("-relaxed", "Relaxed reporting mode (more false positives!)");
addSwitchWithOptionalExtraPart("-train", "outputDir",
"Save training data (experimental); output dir defaults to '.'");
addSwitchWithOptionalExtraPart("-useTraining", "inputDir",
"Use training data (experimental); input dir defaults to '.'");
addOption("-outputFile", "filename", "Save output in named file");
addOption("-visitors", "v1[,v2...]", "run only named visitors");
addOption("-omitVisitors", "v1[,v2...]", "omit named visitors");
addOption("-chooseVisitors", "+v1,-v2,...", "selectively enable/disable detectors");
addOption("-choosePlugins", "+p1,-p2,...", "selectively enable/disable plugins");
addOption("-adjustPriority", "v1=(raise|lower)[,...]",
"raise/lower priority of warnings for given visitor(s)");
addOption("-bugCategories", "cat1[,cat2...]", "only report bugs in given categories");
addOption("-onlyAnalyze", "classes/packages", "only analyze given classes and packages");
addOption("-exclude", "filter file", "exclude bugs matching given filter");
addOption("-include", "filter file", "include only bugs matching given filter");
addOption("-auxclasspath", "classpath", "set aux classpath for analysis");
addOption("-sourcepath", "source path", "set source path for analyzed classes");
addSwitch("-exitcode", "set exit code of process");
}
public Project getProject() {
return project;
}
public boolean setExitCode() {
return setExitCode;
}
public boolean quiet() {
return quiet;
}
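		/** Handle a switch that takes no separate argument (optionally with an extra part, e.g. -xml:withMessages). */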
protected void handleOption(String option, String optionExtraPart) {
if (option.equals("-showPlugins")) {
System.out.println("Available plugins:");
int count = 0;
for (Iterator<Plugin> i = DetectorFactoryCollection.instance().pluginIterator(); i.hasNext(); ) {
Plugin plugin = i.next();
System.out.println(" " + plugin.getPluginId() + " (default: " +
(plugin.isEnabled() ? "enabled" : "disabled") + ")");
if (plugin.getShortDescription() != null)
System.out.println(" Description: " + plugin.getShortDescription());
if (plugin.getProvider() != null)
System.out.println(" Provider: " + plugin.getProvider());
if (plugin.getWebsite() != null)
System.out.println(" Website: " + plugin.getWebsite());
++count;
}
if (count == 0) {
System.out.println(" No plugins are available (FindBugs installed incorrectly?)");
}
System.exit(0);
} else if (option.equals("-experimental"))
priorityThreshold = Detector.EXP_PRIORITY;
else if (option.equals("-longBugCodes"))
useLongBugCodes = true;
else if (option.equals("-low"))
priorityThreshold = Detector.LOW_PRIORITY;
else if (option.equals("-medium"))
priorityThreshold = Detector.NORMAL_PRIORITY;
else if (option.equals("-high"))
priorityThreshold = Detector.HIGH_PRIORITY;
else if (option.equals("-sortByClass"))
bugReporterType = SORTING_REPORTER;
else if (option.equals("-xml")) {
bugReporterType = XML_REPORTER;
if (!optionExtraPart.equals("")) {
if (optionExtraPart.equals("withMessages"))
xmlWithMessages = true;
else
throw new IllegalArgumentException("Unknown option: -xml:" + optionExtraPart);
}
} else if (option.equals("-emacs")) {
bugReporterType = EMACS_REPORTER;
} else if (option.equals("-relaxed")) {
relaxedReportingMode = true;
} else if (option.equals("-train")) {
trainingOutputDir = !optionExtraPart.equals("") ? optionExtraPart : ".";
} else if (option.equals("-useTraining")) {
trainingInputDir = !optionExtraPart.equals("") ? optionExtraPart : ".";
} else if (option.equals("-html")) {
bugReporterType = HTML_REPORTER;
if (!optionExtraPart.equals("")) {
stylesheet = optionExtraPart;
} else {
stylesheet = "default.xsl";
}
} else if (option.equals("-xdocs"))
bugReporterType = XDOCS_REPORTER;
else if (option.equals("-quiet"))
quiet = true;
else if (option.equals("-exitcode"))
setExitCode = true;
else {
super.handleOption(option, optionExtraPart);
}
}
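		/** Handle an option that takes an argument, e.g. -visitors, -exclude or -auxclasspath. */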
protected void handleOptionWithArgument(String option, String argument) throws IOException {
if (option.equals("-outputFile")) {
String outputFile = argument;
try {
outputStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outputFile)));
} catch (IOException e) {
System.err.println("Couldn't open " + outputFile + " for output: " + e.toString());
System.exit(1);
}
} else if (option.equals("-visitors") || option.equals("-omitVisitors")) {
boolean omit = option.equals("-omitVisitors");
if (!omit) {
// Selecting detectors explicitly, so start out by
// disabling all of them. The selected ones will
// be re-enabled.
userPreferences.enableAllDetectors(false);
}
// Explicitly enable or disable the selected detectors.
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String visitorName = tok.nextToken();
DetectorFactory factory = DetectorFactoryCollection.instance().getFactory(visitorName);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + visitorName);
userPreferences.enableDetector(factory, !omit);
}
} else if (option.equals("-chooseVisitors")) {
// This is like -visitors and -omitVisitors, but
// you can selectively enable and disable detectors,
// starting from the default set (or whatever set
// happens to be in effect).
choose(argument, "Detector choices", new Chooser() {
public void choose(boolean enabled, String what) {
DetectorFactory factory = DetectorFactoryCollection.instance()
.getFactory(what);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + what);
if (DEBUG) {
System.err.println("Detector " + factory.getShortName() + " " +
(enabled ? "enabled" : "disabled") +
", userPreferences="+System.identityHashCode(userPreferences));
}
userPreferences.enableDetector(factory, enabled);
}
});
} else if (option.equals("-choosePlugins")) {
// Selectively enable/disable plugins
choose(argument, "Plugin choices", new Chooser() {
public void choose(boolean enabled, String what) {
Plugin plugin = DetectorFactoryCollection.instance().getPluginById(what);
if (plugin == null)
throw new IllegalArgumentException("Unknown plugin: " + what);
plugin.setEnabled(enabled);
}
});
} else if (option.equals("-adjustPriority")) {
// Selectively raise or lower the priority of warnings
// produced by specified detectors.
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String token = tok.nextToken();
int eq = token.indexOf('=');
if (eq < 0)
throw new IllegalArgumentException("Illegal priority adjustment: " + token);
String visitorName = token.substring(0, eq);
DetectorFactory factory = DetectorFactoryCollection.instance()
.getFactory(visitorName);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + visitorName);
String adjustment = token.substring(eq + 1);
if (!(adjustment.equals("raise") || adjustment.equals("lower")))
throw new IllegalArgumentException("Illegal priority adjustment value: " +
adjustment);
// Recall that lower values are higher priorities
factory.setPriorityAdjustment(adjustment.equals("raise") ? -1 : +1);
}
} else if (option.equals("-bugCategories")) {
this.bugCategorySet = handleBugCategories(userPreferences, argument);
} else if (option.equals("-onlyAnalyze")) {
// The argument is a comma-separated list of classes and packages
				// to select to analyze. (If a list item ends with ".*" it specifies
				// a package, if it ends with ".-" it specifies a package prefix,
				// otherwise it's a class.)
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String item = tok.nextToken();
if (item.endsWith(".-"))
classScreener.addAllowedPrefix(item.substring(0, item.length() - 1));
else if (item.endsWith(".*"))
classScreener.addAllowedPackage(item.substring(0, item.length() - 1));
else
classScreener.addAllowedClass(item);
}
} else if (option.equals("-exclude") || option.equals("-include")) {
filterFile = argument;
include = option.equals("-include");
} else if (option.equals("-auxclasspath")) {
StringTokenizer tok = new StringTokenizer(argument, File.pathSeparator);
while (tok.hasMoreTokens())
project.addAuxClasspathEntry(tok.nextToken());
} else if (option.equals("-sourcepath")) {
StringTokenizer tok = new StringTokenizer(argument, File.pathSeparator);
while (tok.hasMoreTokens())
project.addSourceDir(new File(tok.nextToken()).getAbsolutePath());
} else {
super.handleOptionWithArgument(option, argument);
}
}
/**
* Common handling code for -chooseVisitors and -choosePlugins options.
*
* @param argument the list of visitors or plugins to be chosen
* @param desc String describing what is being chosen
* @param chooser callback object to selectively choose list members
*/
private void choose(String argument, String desc, Chooser chooser) {
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String what = tok.nextToken();
if (!what.startsWith("+") && !what.startsWith("-"))
throw new IllegalArgumentException(desc + " must start with " +
"\"+\" or \"-\" (saw " + what + ")");
boolean enabled = what.startsWith("+");
chooser.choose(enabled, what.substring(1));
}
}
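		/** Create a FindBugs engine configured from the parsed command-line options: reporter type, priority threshold, filters, detector selection and training settings. */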
public FindBugs createEngine() throws IOException, FilterException {
TextUIBugReporter textuiBugReporter = null;
switch (bugReporterType) {
case PRINTING_REPORTER:
textuiBugReporter = new PrintingBugReporter();
break;
case SORTING_REPORTER:
textuiBugReporter = new SortingBugReporter();
break;
case XML_REPORTER:
{
XMLBugReporter xmlBugReporter = new XMLBugReporter(project);
xmlBugReporter.setAddMessages(xmlWithMessages);
textuiBugReporter = xmlBugReporter;
}
break;
case EMACS_REPORTER:
textuiBugReporter = new EmacsBugReporter();
break;
case HTML_REPORTER:
textuiBugReporter = new HTMLBugReporter(project, stylesheet);
break;
case XDOCS_REPORTER:
textuiBugReporter = new XDocsBugReporter(project);
break;
default:
throw new IllegalStateException();
}
if (quiet)
textuiBugReporter.setErrorVerbosity(BugReporter.SILENT);
textuiBugReporter.setPriorityThreshold(priorityThreshold);
textuiBugReporter.setUseLongBugCodes(useLongBugCodes);
if (outputStream != null)
textuiBugReporter.setOutputStream(outputStream);
BugReporter bugReporter = textuiBugReporter;
if (bugCategorySet != null) {
bugReporter = new CategoryFilteringBugReporter(bugReporter, bugCategorySet);
}
FindBugs findBugs = new FindBugs(bugReporter, project);
findBugs.setUserPreferences(userPreferences);
if (filterFile != null)
findBugs.setFilter(filterFile, include);
findBugs.setClassScreener(classScreener);
findBugs.setRelaxedReportingMode(relaxedReportingMode);
if (trainingOutputDir != null) {
findBugs.enableTrainingOutput(trainingOutputDir);
}
if (trainingInputDir != null) {
findBugs.enableTrainingInput(trainingInputDir);
}
findBugs.setAnalysisFeatureSettings(settingList);
return findBugs;
}
}
/* ----------------------------------------------------------------------
* Member variables
* ---------------------------------------------------------------------- */
public static final boolean DEBUG = Boolean.getBoolean("findbugs.debug");
/**
* FindBugs home directory.
*/
private static String home;
/**
* File extensions that indicate an archive (zip, jar, or similar).
*/
static public final Set<String> archiveExtensionSet = new HashSet<String>();
static {
archiveExtensionSet.add(".jar");
archiveExtensionSet.add(".zip");
archiveExtensionSet.add(".war");
archiveExtensionSet.add(".ear");
archiveExtensionSet.add(".sar");
}
/**
* Known URL protocols.
* Filename URLs that do not have an explicit protocol are
* assumed to be files.
*/
static public final Set<String> knownURLProtocolSet = new HashSet<String>();
static {
knownURLProtocolSet.add("file");
knownURLProtocolSet.add("http");
knownURLProtocolSet.add("https");
knownURLProtocolSet.add("jar");
}
private ErrorCountingBugReporter bugReporter;
private boolean relaxedReportingMode;
private Project project;
private UserPreferences userPreferences;
private List<ClassObserver> classObserverList;
private ExecutionPlan executionPlan;
private FindBugsProgress progressCallback;
private ClassScreener classScreener;
private AnalysisContext analysisContext;
private String currentClass;
private Map<String,Long> detectorTimings;
private boolean useTrainingInput;
private boolean emitTrainingOutput;
private String trainingInputDir;
private String trainingOutputDir;
//private boolean runSlowFirstPassDetectors;
private AnalysisFeatureSetting[] settingList = DEFAULT_EFFORT;
private int passCount;
/* ----------------------------------------------------------------------
* Public methods
* ---------------------------------------------------------------------- */
/**
* Constructor.
*
* @param bugReporter the BugReporter object that will be used to report
* BugInstance objects, analysis errors, class to source mapping, etc.
* @param project the Project indicating which files to analyze and
* the auxiliary classpath to use; note that the FindBugs
* object will create a private copy of the Project object
*/
public FindBugs(BugReporter bugReporter, Project project) {
if (bugReporter == null)
throw new IllegalArgumentException("null bugReporter");
if (project == null)
throw new IllegalArgumentException("null project");
this.bugReporter = new ErrorCountingBugReporter(bugReporter);
this.relaxedReportingMode = false;
this.project = project.duplicate();
this.userPreferences = UserPreferences.createDefaultUserPreferences();
this.classObserverList = new LinkedList<ClassObserver>();
// Create a no-op progress callback.
this.progressCallback = new FindBugsProgress() {
public void reportNumberOfArchives(int numArchives) {
}
public void finishArchive() {
}
public void startAnalysis(int numClasses) {
}
public void finishClass() {
}
public void finishPerClassAnalysis() {
}
};
// Class screener
this.classScreener = new ClassScreener();
addClassObserver(bugReporter);
}
/**
* Set the progress callback that will be used to keep track
* of the progress of the analysis.
*
* @param progressCallback the progress callback
*/
public void setProgressCallback(FindBugsProgress progressCallback) {
this.progressCallback = progressCallback;
}
/**
* Set filter of bug instances to include or exclude.
*
* @param filterFileName the name of the filter file
* @param include true if the filter specifies bug instances to include,
* false if it specifies bug instances to exclude
*/
public void setFilter(String filterFileName, boolean include) throws IOException, FilterException {
Filter filter = new Filter(filterFileName);
BugReporter origBugReporter = bugReporter.getRealBugReporter();
BugReporter filterBugReporter = new FilterBugReporter(origBugReporter, (Matcher)filter, include);
bugReporter.setRealBugReporter(filterBugReporter);
}
/**
* Set the UserPreferences representing which Detectors
* should be used. If UserPreferences are not set explicitly,
* the default set of Detectors will be used.
*
* @param userPreferences the UserPreferences
*/
public void setUserPreferences(UserPreferences userPreferences) {
this.userPreferences = userPreferences;
}
/**
* Add a ClassObserver.
*
* @param classObserver the ClassObserver
*/
public void addClassObserver(ClassObserver classObserver) {
classObserverList.add(classObserver);
}
/**
* Set the ClassScreener.
* This object chooses which individual classes to analyze.
* By default, all classes are analyzed.
*
* @param classScreener the ClassScreener to use
*/
public void setClassScreener(ClassScreener classScreener) {
this.classScreener = classScreener;
}
/**
* Set relaxed reporting mode.
*
* @param relaxedReportingMode true if relaxed reporting mode should be enabled,
* false if not
*/
public void setRelaxedReportingMode(boolean relaxedReportingMode) {
this.relaxedReportingMode = relaxedReportingMode;
}
/**
* Set whether or not training output should be emitted.
*
* @param trainingOutputDir directory to save training output in
*/
public void enableTrainingOutput(String trainingOutputDir) {
this.emitTrainingOutput = true;
this.trainingOutputDir = trainingOutputDir;
}
/**
* Set whether or not training input should be used to
* make the analysis more precise.
*
* @param trainingInputDir directory to load training input from
*/
public void enableTrainingInput(String trainingInputDir) {
this.useTrainingInput = true;
this.trainingInputDir = trainingInputDir;
}
/**
* Set analysis feature settings.
*
* @param settingList list of analysis feature settings
*/
public void setAnalysisFeatureSettings(AnalysisFeatureSetting[] settingList) {
if (settingList != null)
this.settingList = settingList;
}
/**
* Execute FindBugs on the Project.
* All bugs found are reported to the BugReporter object which was set
* when this object was constructed.
*
* @throws java.io.IOException if an I/O exception occurs analyzing one of the files
* @throws InterruptedException if the thread is interrupted while conducting the analysis
*/
public void execute() throws java.io.IOException, InterruptedException {
// Configure the analysis context
analysisContext = new AnalysisContext(bugReporter);
analysisContext.setSourcePath(project.getSourceDirList());
// Enable/disable relaxed reporting mode
FindBugsAnalysisFeatures.setRelaxedMode(relaxedReportingMode);
// Enable input/output of interprocedural property databases
if (emitTrainingOutput) {
if (!new File(trainingOutputDir).isDirectory())
throw new IOException("Training output directory " + trainingOutputDir + " does not exist");
AnalysisContext.currentAnalysisContext().setDatabaseOutputDir(trainingOutputDir);
// XXX: hack
System.setProperty("findbugs.checkreturn.savetraining", new File(trainingOutputDir, "checkReturn.db").getPath());
}
if (useTrainingInput) {
if (!new File(trainingInputDir).isDirectory())
throw new IOException("Training input directory " + trainingInputDir + " does not exist");
AnalysisContext.currentAnalysisContext().setDatabaseInputDir(trainingInputDir);
AnalysisContext.currentAnalysisContext().loadInterproceduralDatabases();
// XXX: hack
System.setProperty("findbugs.checkreturn.loadtraining", new File(trainingInputDir, "checkReturn.db").getPath());
}
configureAnalysisFeatures();
// Give the BugReporter a reference to this object,
// in case it wants to access information such
// as the AnalysisContext
bugReporter.setEngine(this);
// Create execution plan
try {
createExecutionPlan();
} catch (OrderingConstraintException e) {
IOException ioe = new IOException("Invalid detector ordering constraints");
ioe.initCause(e);
throw ioe;
}
// Clear the repository of classes
analysisContext.clearRepository();
// Get list of files to analyze.
		// Note that although these are often jar files,
		// they can also be zip files, directories,
		// and single class files.
LinkedList<ArchiveWorkListItem> archiveWorkList = new LinkedList<ArchiveWorkListItem>();
for (Iterator<String> i = project.getFileList().iterator(); i.hasNext(); ) {
String fileName = i.next();
archiveWorkList.add(new ArchiveWorkListItem(fileName, true));
}
// Report how many archives/directories/files will be analyzed,
// for progress dialog in GUI
progressCallback.reportNumberOfArchives(archiveWorkList.size());
// Keep track of the names of all classes to be analyzed
List<String> repositoryClassList = new LinkedList<String>();
// set the initial repository classpath.
setRepositoryClassPath();
// Record additional entries that should be added to
// the aux classpath. These occur when one or more classes
// in a directory or archive are skipped, to ensure that
// the skipped classes can still be referenced.
List<String> additionalAuxClasspathEntryList = new LinkedList<String>();
// Add all classes in analyzed archives/directories/files
while (!archiveWorkList.isEmpty()) {
ArchiveWorkListItem item = archiveWorkList.removeFirst();
scanArchiveOrDirectory(item, archiveWorkList, repositoryClassList,
additionalAuxClasspathEntryList);
}
// Add "extra" aux classpath entries needed to ensure that
// skipped classes can be referenced.
addCollectionToClasspath(additionalAuxClasspathEntryList);
// Examine all classes for bugs.
// Don't examine the same class more than once.
// (The user might specify two jar files that contain
// the same class.)
if (DEBUG)
detectorTimings = new HashMap<String,Long>();
// Execute each analysis pass in the execution plan
for (Iterator<AnalysisPass> i = executionPlan.passIterator(); i.hasNext();) {
AnalysisPass analysisPass = i.next();
analysisPass.createDetectors(bugReporter);
executeAnalysisPass(analysisPass, repositoryClassList);
// Clear the ClassContext cache.
// It may contain data that should be recomputed on the next pass.
analysisContext.clearClassContextCache();
}
// Flush any queued bug reports
bugReporter.finish();
// Flush any queued error reports
bugReporter.reportQueuedErrors();
// Free up memory for reports
analysisContext.clearRepository();
}
/**
* Get the analysis context.
* It is only valid to call this method after the execute()
* method has been called.
*/
public AnalysisContext getAnalysisContext() {
return analysisContext;
}
/**
* Get the name of the most recent class to be analyzed.
* This is useful for diagnosing an unexpected exception.
* Returns null if no class has been analyzed.
*/
public String getCurrentClass() {
return currentClass;
}
/**
* Get the number of bug instances that were reported during analysis.
*/
public int getBugCount() {
return bugReporter.getBugCount();
}
/**
* Get the number of errors that occurred during analysis.
*/
public int getErrorCount() {
return bugReporter.getErrorCount();
}
/**
	 * Get the number of times missing classes were reported during analysis.
*/
public int getMissingClassCount() {
return bugReporter.getMissingClassCount();
}
/**
* Set the FindBugs home directory.
*/
public static void setHome(String home) {
FindBugs.home = home;
}
/**
* Get the FindBugs home directory.
*/
public static String getHome() {
if (home == null) {
home = System.getProperty("findbugs.home");
if (home == null) {
System.err.println("Error: The findbugs.home property is not set!");
}
}
return home;
}
/* ----------------------------------------------------------------------
* Private methods
* ---------------------------------------------------------------------- */
/**
* Configure analysis features.
*/
private void configureAnalysisFeatures() {
for (AnalysisFeatureSetting setting : settingList) {
setting.configure(analysisContext);
}
}
/**
* Create the ExecutionPlan.
*
* @throws OrderingConstraintException
*/
private void createExecutionPlan() throws OrderingConstraintException {
executionPlan = new ExecutionPlan();
// Only enabled detectors should be part of the execution plan
executionPlan.setDetectorFactoryChooser(new DetectorFactoryChooser() {
public boolean choose(DetectorFactory factory) {
boolean enabled = isDetectorEnabled(factory);
// if (ExecutionPlan.DEBUG) {
// System.out.println(factory.getShortName() + ": enabled=" + enabled);
// }
return enabled;
}
});
// Add plugins
for (Iterator<Plugin> i = DetectorFactoryCollection.instance().pluginIterator(); i.hasNext();) {
Plugin plugin = i.next();
executionPlan.addPlugin(plugin);
}
// Build the plan
executionPlan.build();
}
/**
	 * Determine whether or not the given DetectorFactory should be enabled.
*
* @param factory the DetectorFactory
* @return true if the DetectorFactory should be enabled, false otherwise
*/
private boolean isDetectorEnabled(DetectorFactory factory) {
if (!factory.getPlugin().isEnabled())
return false;
if (!userPreferences.isDetectorEnabled(factory))
return false;
if (!factory.isEnabledForCurrentJRE())
return false;
// Slow first pass detectors are usually disabled, but may be explicitly enabled
if (!analysisContext.getBoolProperty(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS)
&& factory.isDetectorClassSubtypeOf(InterproceduralFirstPassDetector.class))
return false;
// Training detectors are enabled if, and only if, we are emitting training output
// XXX: grotesque hack for NoteCheckReturnValue. Need to fix for real.
boolean isTrainingDetector = factory.isDetectorClassSubtypeOf(TrainingDetector.class)
|| (emitTrainingOutput && factory.getFullName().equals("edu.umd.cs.findbugs.detect.NoteCheckReturnValue"));
if (isTrainingDetector != emitTrainingOutput)
return false;
return true;
}
/**
	 * Based on Project settings, set the classpath to be used
	 * by the Repository when looking up classes.
*/
private void setRepositoryClassPath() {
// Set aux classpath entries
addCollectionToClasspath(project.getAuxClasspathEntryList());
// Set implicit classpath entries
addCollectionToClasspath(project.getImplicitClasspathEntryList());
// Add system classpath entries
String systemClassPath = ClassPath.getClassPath();
StringTokenizer tok = new StringTokenizer(systemClassPath, File.pathSeparator);
while (tok.hasMoreTokens()) {
String entry = tok.nextToken();
try {
analysisContext.addClasspathEntry(entry);
} catch (IOException e) {
bugReporter.logError("Warning: could not add URL " +
entry + " to classpath", e);
}
}
}
/**
	 * Add all classpath entries in the given Collection to the classpath
	 * used by the analysis context when looking up classes.
	 * Missing entries are not fatal: we'll log them as analysis errors,
	 * but the analysis can continue.
	 *
	 * @param collection classpath entries to add
*/
private void addCollectionToClasspath(Collection<String> collection) {
for (Iterator<String> i = collection.iterator(); i.hasNext(); ) {
String entry = i.next();
try {
//repository.addURL(entry);
analysisContext.addClasspathEntry(entry);
} catch (IOException e) {
bugReporter.logError("Warning: could not add URL " +
entry + " to classpath", e);
}
}
}
/**
* Add all classes contained in given file or directory to the BCEL Repository.
*
* @param item work list item representing the file, which may be a jar/zip
* archive, a single class file, or a directory to be recursively
* searched for class files
* @param archiveWorkList work list of archives to analyze: this method
* may add to the work list if it finds nested archives
* @param repositoryClassList a List to which all classes found in
* the archive or directory are added, so we later know
* which files to analyze
*/
private void scanArchiveOrDirectory(ArchiveWorkListItem item,
LinkedList<ArchiveWorkListItem> archiveWorkList, List<String> repositoryClassList,
List<String> additionalAuxClasspathEntryList)
throws IOException, InterruptedException {
String fileName = item.getFileName();
ClassProducer classProducer = null;
try {
// Create a URL for the filename.
// The protocol defaults to "file" if not explicitly
// specified in the filename.
String protocol = URLClassPath.getURLProtocol(fileName);
if (protocol == null) {
protocol = "file";
fileName = "file:" + fileName;
}
URL url = new URL(fileName);
// Figure out the file extension
String fileExtension = null;
int lastDot = fileName.lastIndexOf('.');
if (lastDot >= 0) {
fileExtension = fileName.substring(lastDot);
}
// Create the ClassProducer
if (fileExtension != null && URLClassPath.isArchiveExtension(fileExtension))
classProducer = new ZipClassProducer(url, archiveWorkList, additionalAuxClasspathEntryList);
else if (fileExtension != null && fileExtension.equals(".class"))
classProducer = new SingleClassProducer(url);
else if (protocol.equals("file")) {
// Assume it's a directory
fileName = fileName.substring("file:".length());
File dir = new File(fileName);
if (!dir.isDirectory())
throw new IOException("Path " + fileName + " is not an archive, class file, or directory");
classProducer = new DirectoryClassProducer(fileName, additionalAuxClasspathEntryList);
} else
throw new IOException("URL " + fileName + " is not an archive, class file, or directory");
// Load all referenced classes into the Repository
for (; ;) {
if (Thread.interrupted())
throw new InterruptedException();
try {
JavaClass jclass = classProducer.getNextClass();
if (jclass == null)
break;
if (DEBUG) System.out.println("Scanned " + jclass.getClassName());
analysisContext.addApplicationClassToRepository(jclass);
repositoryClassList.add(jclass.getClassName());
} catch (ClassFormatException e) {
if (DEBUG) e.printStackTrace();
bugReporter.logError("Invalid classfile format", e);
}
}
if (item.isExplicit())
progressCallback.finishArchive();
// If the archive or directory scanned contained source files,
// add it to the end of the source path.
if (classProducer.containsSourceFiles())
project.addSourceDir(fileName);
} catch (IOException e) {
// You'd think that the message for a FileNotFoundException would include
// the filename, but you'd be wrong. So, we'll add it explicitly.
throw new IOException("Could not analyze " + fileName + ": " + e.getMessage());
} finally {
if (classProducer != null) {
classProducer.close();
}
}
}
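	/*
	 * Illustrative walk-through of the protocol defaulting above, using
	 * hypothetical file names. A plain path such as "build/classes" has no
	 * URL protocol, so it is rewritten to "file:build/classes"; because it
	 * names a directory, a DirectoryClassProducer is used. A nested archive
	 * found while scanning "app.war" would instead be queued on the work
	 * list as "jar:file:app.war!/WEB-INF/lib/util.jar".
	 */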
/**
* Execute a single AnalysisPass.
*
* @param analysisPass the AnalysisPass
* @param repositoryClassList list of application classes in the repository
* @throws InterruptedException
*/
private void executeAnalysisPass(AnalysisPass analysisPass, List<String> repositoryClassList) throws InterruptedException {
// Callback for progress dialog: analysis is starting
progressCallback.startAnalysis(repositoryClassList.size());
if (ExecutionPlan.DEBUG) {
System.out.println("************* Analysis pass " + (passCount++) + " *************");
for (Iterator<DetectorFactory> i = analysisPass.iterator(); i.hasNext();) {
DetectorFactory factory = i.next();
System.out.println("\t" + factory.getFullName());
}
}
// Examine each class in the application
Set<String> examinedClassSet = new HashSet<String>();
for (Iterator<String> i = repositoryClassList.iterator(); i.hasNext();) {
String className = i.next();
if (examinedClassSet.add(className))
examineClass(analysisPass, className);
}
if (DEBUG) {
long total = 0;
Iterator<Long> timingsIt = detectorTimings.values().iterator();
while (timingsIt.hasNext()) {
total += timingsIt.next().longValue();
}
System.out.println();
System.out.println("Detector Timings");
Iterator<Map.Entry<String,Long>> it = detectorTimings.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String,Long> entry = it.next();
String detectorName = entry.getKey();
long detectorTime = entry.getValue().longValue();
System.out.println(detectorName + ": " + detectorTime + " ms -> (" + (detectorTime * 100.0f / (float)total) + ") %");
}
System.out.println();
detectorTimings = new HashMap<String,Long>();
}
// Callback for progress dialog: analysis finished
progressCallback.finishPerClassAnalysis();
// Force any detectors which defer work until all classes have
// been seen to do that work.
this.reportFinal(analysisPass.getDetectorList());
}
/**
* Examine a single class by invoking all of the Detectors on it.
*
* @param className the fully qualified name of the class to examine
*/
private void examineClass(AnalysisPass analysisPass, String className) throws InterruptedException {
if (DEBUG) System.out.println("Examining class " + className);
this.currentClass = className;
Detector[] detectors = analysisPass.getDetectorList();
try {
JavaClass javaClass = Repository.lookupClass(className);
// Notify ClassObservers
for (Iterator<ClassObserver> i = classObserverList.iterator(); i.hasNext();) {
i.next().observeClass(javaClass);
}
// Create a ClassContext for the class
ClassContext classContext = analysisContext.getClassContext(javaClass);
// Run the Detectors
for (int i = 0; i < detectors.length; ++i) {
if (Thread.interrupted())
throw new InterruptedException();
Detector detector = detectors[i];
if (detector instanceof StatelessDetector) {
try {
detector = (Detector)((StatelessDetector)detector).clone();
} catch (CloneNotSupportedException cnfe) {
detector = detectors[i]; // this shouldn't happen
}
}
try {
long start = 0, end;
if (DEBUG) {
System.out.println(" running " + detector.getClass().getName());
start = System.currentTimeMillis();
}
detector.visitClassContext(classContext);
if (DEBUG) {
end = System.currentTimeMillis();
long delta = end - start;
String detectorName = detector.getClass().getName();
Long total = detectorTimings.get(detectorName);
if (total == null)
total = new Long(delta);
else
total = new Long(total.longValue() + delta);
detectorTimings.put(detectorName, total);
}
} catch (AnalysisException e) {
reportRecoverableDetectorException(className, detector, e);
} catch (ArrayIndexOutOfBoundsException e) {
reportRecoverableDetectorException(className, detector, e);
} catch (ClassCastException e) {
reportRecoverableDetectorException(className, detector, e);
}
}
} catch (ClassNotFoundException e) {
// This should never happen unless there are bugs in BCEL.
bugReporter.reportMissingClass(e);
reportRecoverableException(className, e);
} catch (ClassFormatException e) {
reportRecoverableException(className, e);
}
catch (RuntimeException re) {
RuntimeException annotatedEx;
try {
String sep = System.getProperty("line.separator");
Constructor<? extends RuntimeException> c = re.getClass().getConstructor(new Class[] { String.class });
String msg = re.getMessage();
msg = sep + "While finding bugs in class: " + className + ((msg == null) ? "" : (sep + msg));
annotatedEx = c.newInstance(new Object[] {msg});
annotatedEx.setStackTrace(re.getStackTrace());
} catch (Exception e) {
throw re;
}
throw annotatedEx;
}
progressCallback.finishClass();
}
private void reportRecoverableException(String className, Exception e) {
if (DEBUG) {
e.printStackTrace();
}
bugReporter.logError("Exception analyzing " + className, e);
}
private void reportRecoverableDetectorException(String className, Detector detector, Exception e) {
if (DEBUG) {
e.printStackTrace();
}
bugReporter.logError("Exception analyzing " + className +
" using detector " + detector.getClass().getName(), e);
}
/**
* Call report() on all detectors, to give them a chance to
* report any accumulated bug reports.
*/
private void reportFinal(Detector[] detectors) throws InterruptedException {
for (int i = 0; i < detectors.length; ++i) {
if (Thread.interrupted())
throw new InterruptedException();
detectors[i].report();
}
}
/**
* Parse the data for a class to create a JavaClass object.
*/
private static JavaClass parseClass(String archiveName, InputStream in, String fileName)
throws IOException {
if (DEBUG) System.out.println("About to parse " + fileName + " in " + archiveName);
return parseFromStream(in, fileName);
}
/**
* Parse the data for a class to create a JavaClass object.
*/
private static JavaClass parseClass(URL url) throws IOException {
if (DEBUG) System.out.println("About to parse " + url.toString());
InputStream in = url.openStream();
return parseFromStream(in, url.toString());
}
/**
* Parse an input stream to produce a JavaClass object.
* Makes sure that the input stream is closed no
* matter what.
*/
private static JavaClass parseFromStream(InputStream in, String fileName) throws IOException {
boolean parsed = false;
try {
JavaClass jclass = new ClassParser(in, fileName).parse();
parsed = true;
return jclass;
} finally {
if (!parsed) {
// BCEL does not close the input stream unless
// parsing was successful.
try {
in.close();
} catch (IOException ignore) {
// Ignore
}
}
}
}
/**
* Process -bugCategories option.
*
* @param userPreferences UserPreferences representing which Detectors are enabled
* @param categories comma-separated list of bug categories
* @return Set of categories to be used
*/
private static Set<String> handleBugCategories(UserPreferences userPreferences, String categories) {
// Parse list of bug categories
Set<String> categorySet = new HashSet<String>();
StringTokenizer tok = new StringTokenizer(categories, ",");
while (tok.hasMoreTokens()) {
categorySet.add(tok.nextToken());
}
// // Enable only those detectors that can emit those categories
// // (and the ones that produce unknown bug patterns, just to be safe).
// // Skip disabled detectors, though.
// for (Iterator<DetectorFactory> i = DetectorFactoryCollection.instance().factoryIterator(); i.hasNext();) {
// DetectorFactory factory = i.next();
// if (!factory.isEnabledForCurrentJRE())
// continue;
// Collection<BugPattern> reported = factory.getReportedBugPatterns();
// boolean enable = false;
// if (reported.isEmpty()) {
// // Don't know what bug patterns are produced by this detector
// if (DEBUG) System.out.println("Unknown bug patterns for " + factory.getShortName());
// enable = true;
// } else {
// for (Iterator<BugPattern> j = reported.iterator(); j.hasNext();) {
// BugPattern bugPattern = j.next();
// if (categorySet.contains(bugPattern.getCategory())) {
// if (DEBUG)
// System.out.println("MATCH ==> " + categorySet +
// " -- " + bugPattern.getCategory());
// enable = true;
// break;
// }
// }
// }
// if (DEBUG && enable) {
// System.out.println("Enabling " + factory.getShortName());
// }
// userPreferences.enableDetector(factory, enable);
// }
return categorySet;
}
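	/*
	 * A small sketch of the parsing above; the category names and the
	 * "prefs" variable are only examples of what a caller might pass:
	 *
	 *     Set<String> cats = handleBugCategories(prefs, "CORRECTNESS,MT_CORRECTNESS");
	 *     // cats now contains exactly "CORRECTNESS" and "MT_CORRECTNESS"
	 */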
/* ----------------------------------------------------------------------
* main() method
* ---------------------------------------------------------------------- */
public static void main(String[] argv) {
try {
TextUICommandLine commandLine = new TextUICommandLine();
FindBugs findBugs = createEngine(commandLine, argv);
try {
runMain(findBugs, commandLine);
} catch (RuntimeException e) {
System.err.println("Fatal exception: " + e.toString());
String currentClass = findBugs.getCurrentClass();
if (currentClass != null) {
System.err.println("\tWhile analyzing " + currentClass);
}
e.printStackTrace();
System.err.println("Please report the failure to " + Version.SUPPORT_EMAIL);
System.exit(1);
}
} catch (java.io.IOException e) {
// Probably a missing file
if (DEBUG) {
e.printStackTrace();
}
System.err.println("IO Error: " + e.getMessage());
System.exit(1);
} catch (FilterException e) {
System.err.println("Filter exception: " + e.getMessage());
} catch (IllegalArgumentException e) {
// Probably an illegal command line argument
System.err.println("Illegal argument: " + e.getMessage());
System.exit(1);
}
}
private static FindBugs createEngine(TextUICommandLine commandLine, String[] argv)
throws java.io.IOException, FilterException {
// Expand option files in command line.
// An argument beginning with "@" is treated as specifying
// the name of an option file.
		// Each line of an option file is treated as a single argument.
// Blank lines and comment lines (beginning with "#")
// are ignored.
argv = CommandLine.expandOptionFiles(argv, true, true);
int argCount = 0;
try {
argCount = commandLine.parse(argv);
} catch (IllegalArgumentException e) {
System.out.println(e.getMessage());
showHelp(commandLine);
}
Project project = commandLine.getProject();
for (int i = argCount; i < argv.length; ++i)
project.addFile(argv[i]);
if (project.getFileCount() == 0) {
showHelp(commandLine);
}
return commandLine.createEngine();
}
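	/*
	 * Sketch of the "@" option-file expansion performed above, using a
	 * hypothetical file name. Given a file findbugs.opts containing
	 *
	 *     # one argument per line; blank lines and "#" comments are ignored
	 *     -sortByClass
	 *     -outputFile
	 *     results.txt
	 *
	 * the invocation "findbugs -textui @findbugs.opts myapp.jar" behaves as
	 * if those arguments had been given directly on the command line.
	 */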
private static void showHelp(TextUICommandLine commandLine) {
showSynopsis();
ShowHelp.showGeneralOptions();
FindBugs.showCommandLineOptions(commandLine);
System.exit(1);
}
private static void runMain(FindBugs findBugs, TextUICommandLine commandLine)
throws java.io.IOException, RuntimeException, FilterException {
try {
findBugs.execute();
} catch (InterruptedException e) {
// Not possible when running from the command line
}
int bugCount = findBugs.getBugCount();
int missingClassCount = findBugs.getMissingClassCount();
int errorCount = findBugs.getErrorCount();
if (!commandLine.quiet() || commandLine.setExitCode()) {
if (bugCount > 0)
System.err.println("Warnings generated: " + bugCount);
if (missingClassCount > 0)
System.err.println("Missing classes: " + missingClassCount);
if (errorCount > 0)
System.err.println("Analysis errors: " + errorCount);
}
if (commandLine.setExitCode()) {
int exitCode = 0;
if (errorCount > 0)
exitCode |= ERROR_FLAG;
if (missingClassCount > 0)
exitCode |= MISSING_CLASS_FLAG;
if (bugCount > 0)
exitCode |= BUGS_FOUND_FLAG;
System.exit(exitCode);
}
}
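	/*
	 * Worked example of the exit-code composition above: if the analysis
	 * produced warnings and also logged analysis errors, but reported no
	 * missing classes, the process exits with ERROR_FLAG | BUGS_FOUND_FLAG.
	 * The individual flag values are defined in the ExitCodes interface.
	 */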
/**
* Print command line options synopses to stdout.
*/
public static void showCommandLineOptions() {
showCommandLineOptions(new TextUICommandLine());
}
public static void showCommandLineOptions(TextUICommandLine commandLine) {
System.out.println("Command line options:");
commandLine.printUsage(System.out);
}
public static void showSynopsis() {
System.out.println("Usage: findbugs [general options] -textui [command line options...] [jar/zip/class files, directories...]");
}
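	/*
	 * An example invocation matching the synopsis above (the archive and
	 * directory names are placeholders):
	 *
	 *     findbugs -textui -sortByClass -outputFile results.txt lib/myapp.jar build/classes
	 */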
}
// vim:ts=4
| findbugs/src/java/edu/umd/cs/findbugs/FindBugs.java | /*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2003-2005 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import org.apache.bcel.Repository;
import org.apache.bcel.classfile.ClassFormatException;
import org.apache.bcel.classfile.ClassParser;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.util.ClassPath;
import edu.umd.cs.findbugs.ba.AnalysisContext;
import edu.umd.cs.findbugs.ba.AnalysisException;
import edu.umd.cs.findbugs.ba.AnalysisFeatures;
import edu.umd.cs.findbugs.ba.ClassContext;
import edu.umd.cs.findbugs.ba.ClassObserver;
import edu.umd.cs.findbugs.ba.URLClassPath;
import edu.umd.cs.findbugs.config.AnalysisFeatureSetting;
import edu.umd.cs.findbugs.config.CommandLine;
import edu.umd.cs.findbugs.config.UserPreferences;
import edu.umd.cs.findbugs.filter.Filter;
import edu.umd.cs.findbugs.filter.FilterException;
import edu.umd.cs.findbugs.filter.Matcher;
import edu.umd.cs.findbugs.plan.AnalysisPass;
import edu.umd.cs.findbugs.plan.ExecutionPlan;
import edu.umd.cs.findbugs.plan.OrderingConstraintException;
import edu.umd.cs.findbugs.visitclass.Constants2;
/**
* An instance of this class is used to apply the selected set of
* analyses on some collection of Java classes. It also implements the
 * command line interface.
*
* @author Bill Pugh
* @author David Hovemeyer
*/
public class FindBugs implements Constants2, ExitCodes {
/* ----------------------------------------------------------------------
* Helper classes
* ---------------------------------------------------------------------- */
/**
* Delegating InputStream wrapper that never closes the
* underlying input stream.
*/
private static class NoCloseInputStream extends DataInputStream {
/**
* Constructor.
* @param in the real InputStream
*/
public NoCloseInputStream(InputStream in) {
super(in);
}
public void close() {
}
}
/**
* Work list item specifying a file/directory/URL containing
* class files to analyze.
*/
private static class ArchiveWorkListItem {
private String fileName;
private boolean explicit;
/**
* Constructor.
*
* @param fileName file/directory/URL
* @param explicit true if this source of classes appeared explicitly
		 * in the project file, false if it was found indirectly
* (e.g., a nested jar file in a .war file)
*/
public ArchiveWorkListItem(String fileName, boolean explicit) {
this.fileName = fileName;
this.explicit = explicit;
}
/**
* Get the file/directory/URL.
*/
public String getFileName() {
return fileName;
}
/**
* Return whether this class source appeared explicitly in
* the project file.
*/
public boolean isExplicit() {
return explicit;
}
}
/**
* Interface for an object representing a source of class files to analyze.
*/
private interface ClassProducer {
/**
* Get the next class to analyze.
*
		 * @return the class, or null if there are no more classes for this ClassProducer
* @throws IOException if an IOException occurs
* @throws InterruptedException if the thread is interrupted
*/
public JavaClass getNextClass() throws IOException, InterruptedException;
/**
* Did this class producer scan any Java source files?
*/
public boolean containsSourceFiles();
/**
* Close any internal files or streams.
*/
public void close();
}
/**
* ClassProducer for single class files.
*/
private class SingleClassProducer implements ClassProducer {
private URL url;
/**
* Constructor.
*
* @param url the single class file to be analyzed
*/
public SingleClassProducer(URL url) {
this.url = url;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
if (url == null)
return null;
if (Thread.interrupted())
throw new InterruptedException();
URL urlToParse = url;
url = null; // don't return it next time
// ClassScreener may veto this class.
if (!classScreener.matches(urlToParse.toString()))
return null;
try {
return parseClass(urlToParse);
} catch (ClassFormatException e) {
throw new ClassFormatException("Invalid class file format for " +
						urlToParse.toString() + ": " + e.getMessage());
}
}
public boolean containsSourceFiles() {
return false;
}
public void close() {
// Nothing to do here
}
}
/**
* ClassProducer for zip/jar archives.
*/
private class ZipClassProducer implements ClassProducer {
private URL url;
private LinkedList<ArchiveWorkListItem> archiveWorkList;
private List<String> additionalAuxClasspathEntryList;
private ZipInputStream zipInputStream;
private boolean containsSourceFiles;
public ZipClassProducer(URL url, LinkedList<ArchiveWorkListItem> archiveWorkList,
List<String> additionalAuxClasspathEntryList)
throws IOException {
this.url = url;
this.archiveWorkList = archiveWorkList;
this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList;
if (DEBUG) System.out.println("Opening jar/zip input stream for " + url.toString());
this.zipInputStream = new ZipInputStream(url.openStream());
this.containsSourceFiles = false;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
for (;;) {
if (Thread.interrupted())
throw new InterruptedException();
ZipEntry zipEntry = zipInputStream.getNextEntry();
if (zipEntry == null)
return null;
try {
String entryName = zipEntry.getName();
// ClassScreener may veto this class.
if (!classScreener.matches(entryName)) {
// Add archive URL to aux classpath
if (!additionalAuxClasspathEntryList.contains(url.toString())) {
//System.out.println("Adding additional aux classpath entry: " + url.toString());
additionalAuxClasspathEntryList.add(url.toString());
}
continue;
}
String fileExtension = URLClassPath.getFileExtension(entryName);
if (fileExtension != null) {
if (fileExtension.equals(".class")) {
return parseClass(url.toString(), new NoCloseInputStream(zipInputStream), entryName);
} else if (archiveExtensionSet.contains(fileExtension)) {
// Add nested archive to archive work list
ArchiveWorkListItem nestedItem =
new ArchiveWorkListItem("jar:" + url.toString() + "!/" + entryName, false);
archiveWorkList.addFirst(nestedItem);
} else if (fileExtension.equals(".java")) {
containsSourceFiles = true;
}
}
} finally {
zipInputStream.closeEntry();
}
}
}
public boolean containsSourceFiles() {
return containsSourceFiles;
}
public void close() {
if (zipInputStream != null) {
try {
zipInputStream.close();
} catch (IOException ignore) {
// Ignore
}
}
}
}
/**
* ClassProducer for directories.
* The directory is scanned recursively for class files.
*/
private class DirectoryClassProducer implements ClassProducer {
private String dirName;
private List<String> additionalAuxClasspathEntryList;
private Iterator<String> rfsIter;
private boolean containsSourceFiles;
public DirectoryClassProducer(String dirName,
List<String> additionalAuxClasspathEntryList) throws InterruptedException {
this.dirName = dirName;
this.additionalAuxClasspathEntryList = additionalAuxClasspathEntryList;
FileFilter filter = new FileFilter() {
public boolean accept(File file) {
String fileName = file.getName();
if (file.isDirectory() || fileName.endsWith(".class"))
return true;
if (fileName.endsWith(".java"))
containsSourceFiles = true;
return false;
}
};
// This will throw InterruptedException if the thread is
// interrupted.
RecursiveFileSearch rfs = new RecursiveFileSearch(dirName, filter).search();
this.rfsIter = rfs.fileNameIterator();
this.containsSourceFiles = false;
}
public JavaClass getNextClass() throws IOException, InterruptedException {
String fileName;
for (;;) {
if (!rfsIter.hasNext())
return null;
fileName = rfsIter.next();
if (classScreener.matches(fileName)) {
break;
} else {
// Add directory URL to aux classpath
String dirURL= "file:" + dirName;
if (!additionalAuxClasspathEntryList.contains(dirURL)) {
//System.out.println("Adding additional aux classpath entry: " + dirURL);
additionalAuxClasspathEntryList.add(dirURL);
}
}
}
try {
return parseClass(new URL("file:" + fileName));
} catch (ClassFormatException e) {
throw new ClassFormatException("Invalid class file format for " +
fileName + ": " + e.getMessage());
}
}
public boolean containsSourceFiles() {
return containsSourceFiles;
}
public void close() {
// Nothing to do here
}
}
/**
* A delegating bug reporter which counts reported bug instances,
* missing classes, and serious analysis errors.
*/
private static class ErrorCountingBugReporter extends DelegatingBugReporter {
private int bugCount;
private int missingClassCount;
private int errorCount;
private Set<String> missingClassSet = new HashSet<String>();
public ErrorCountingBugReporter(BugReporter realBugReporter) {
super(realBugReporter);
this.bugCount = 0;
this.missingClassCount = 0;
this.errorCount = 0;
// Add an observer to record when bugs make it through
// all priority and filter criteria, so our bug count is
// accurate.
realBugReporter.addObserver(new BugReporterObserver() {
public void reportBug(BugInstance bugInstance) {
++bugCount;
}
});
}
public int getBugCount() {
return bugCount;
}
public int getMissingClassCount() {
return missingClassCount;
}
public int getErrorCount() {
return errorCount;
}
public void logError(String message) {
++errorCount;
super.logError(message);
}
public void reportMissingClass(ClassNotFoundException ex) {
String missing = AbstractBugReporter.getMissingClassName(ex);
if (missingClassSet.add(missing))
++missingClassCount;
super.reportMissingClass(ex);
}
}
private static class CategoryFilteringBugReporter extends DelegatingBugReporter {
private Set<String> categorySet;
public CategoryFilteringBugReporter(BugReporter realBugReporter, Set<String> categorySet) {
super(realBugReporter);
this.categorySet = categorySet;
}
public void reportBug(BugInstance bugInstance) {
BugPattern bugPattern = bugInstance.getBugPattern();
String category = bugPattern.getCategory();
if (categorySet.contains(category))
getRealBugReporter().reportBug(bugInstance);
}
}
/**
	 * Callback interface for the choose() method,
* used to implement the -chooseVisitors and -choosePlugins options.
*/
private interface Chooser {
/**
* Choose a detector, plugin, etc.
*
* @param enable whether or not the item should be enabled
* @param what the item
*/
public void choose(boolean enable, String what);
}
private static final int PRINTING_REPORTER = 0;
private static final int SORTING_REPORTER = 1;
private static final int XML_REPORTER = 2;
private static final int EMACS_REPORTER = 3;
private static final int HTML_REPORTER = 4;
private static final int XDOCS_REPORTER = 5;
public static final AnalysisFeatureSetting[] MIN_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, true),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, false),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, false),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
};
public static final AnalysisFeatureSetting[] DEFAULT_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, false),
};
public static final AnalysisFeatureSetting[] MAX_EFFORT = new AnalysisFeatureSetting[]{
new AnalysisFeatureSetting(AnalysisFeatures.CONSERVE_SPACE, false),
new AnalysisFeatureSetting(AnalysisFeatures.ACCURATE_EXCEPTIONS, true),
new AnalysisFeatureSetting(AnalysisFeatures.MODEL_INSTANCEOF, true),
new AnalysisFeatureSetting(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS, true),
};
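	/*
	 * Sketch of how these presets are meant to be consumed: a front end picks
	 * one of the arrays and hands it to setAnalysisFeatureSettings(), e.g.
	 *
	 *     findBugs.setAnalysisFeatureSettings(FindBugs.MAX_EFFORT);
	 *
	 * which enables interprocedural analysis at the cost of speed and memory.
	 */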
/**
* Helper class to parse the command line and create
* the FindBugs engine object.
*/
private static class TextUICommandLine extends FindBugsCommandLine {
private int bugReporterType = PRINTING_REPORTER;
private boolean relaxedReportingMode = false;
private boolean useLongBugCodes = false;
private boolean xmlWithMessages = false;
private String stylesheet = null;
private boolean quiet = false;
private ClassScreener classScreener = new ClassScreener();
private String filterFile = null;
private boolean include = false;
private boolean setExitCode = false;
private int priorityThreshold = Detector.NORMAL_PRIORITY;
private PrintStream outputStream = null;
private Set<String> bugCategorySet = null;
private UserPreferences userPreferences = UserPreferences.createDefaultUserPreferences();
private String trainingOutputDir;
private String trainingInputDir;
public TextUICommandLine() {
super();
addSwitch("-showPlugins", "show list of available plugins");
addSwitch("-quiet", "suppress error messages");
addSwitch("-longBugCodes", "report long bug codes");
addSwitch("-experimental", "report all warnings including experimental bug patterns");
addSwitch("-low", "report all warnings");
addSwitch("-medium", "report only medium and high priority warnings [default]");
addSwitch("-high", "report only high priority warnings");
addSwitch("-sortByClass", "sort warnings by class");
addSwitchWithOptionalExtraPart("-xml", "withMessages",
"XML output (optionally with messages)");
addSwitch("-xdocs", "xdoc XML output to use with Apache Maven");
addSwitchWithOptionalExtraPart("-html", "stylesheet",
"Generate HTML output (default stylesheet is default.xsl)");
addSwitch("-emacs", "Use emacs reporting format");
addSwitch("-relaxed", "Relaxed reporting mode (more false positives!)");
addSwitchWithOptionalExtraPart("-train", "outputDir",
"Save training data (experimental); output dir defaults to '.'");
addSwitchWithOptionalExtraPart("-useTraining", "inputDir",
"Use training data (experimental); input dir defaults to '.'");
addOption("-outputFile", "filename", "Save output in named file");
addOption("-visitors", "v1[,v2...]", "run only named visitors");
addOption("-omitVisitors", "v1[,v2...]", "omit named visitors");
addOption("-chooseVisitors", "+v1,-v2,...", "selectively enable/disable detectors");
addOption("-choosePlugins", "+p1,-p2,...", "selectively enable/disable plugins");
addOption("-adjustPriority", "v1=(raise|lower)[,...]",
"raise/lower priority of warnings for given visitor(s)");
addOption("-bugCategories", "cat1[,cat2...]", "only report bugs in given categories");
addOption("-onlyAnalyze", "classes/packages", "only analyze given classes and packages");
addOption("-exclude", "filter file", "exclude bugs matching given filter");
addOption("-include", "filter file", "include only bugs matching given filter");
addOption("-auxclasspath", "classpath", "set aux classpath for analysis");
addOption("-sourcepath", "source path", "set source path for analyzed classes");
addSwitch("-exitcode", "set exit code of process");
}
public Project getProject() {
return project;
}
public boolean setExitCode() {
return setExitCode;
}
public boolean quiet() {
return quiet;
}
protected void handleOption(String option, String optionExtraPart) {
if (option.equals("-showPlugins")) {
System.out.println("Available plugins:");
int count = 0;
for (Iterator<Plugin> i = DetectorFactoryCollection.instance().pluginIterator(); i.hasNext(); ) {
Plugin plugin = i.next();
System.out.println(" " + plugin.getPluginId() + " (default: " +
(plugin.isEnabled() ? "enabled" : "disabled") + ")");
if (plugin.getShortDescription() != null)
System.out.println(" Description: " + plugin.getShortDescription());
if (plugin.getProvider() != null)
System.out.println(" Provider: " + plugin.getProvider());
if (plugin.getWebsite() != null)
System.out.println(" Website: " + plugin.getWebsite());
++count;
}
if (count == 0) {
System.out.println(" No plugins are available (FindBugs installed incorrectly?)");
}
System.exit(0);
} else if (option.equals("-experimental"))
priorityThreshold = Detector.EXP_PRIORITY;
else if (option.equals("-longBugCodes"))
useLongBugCodes = true;
else if (option.equals("-low"))
priorityThreshold = Detector.LOW_PRIORITY;
else if (option.equals("-medium"))
priorityThreshold = Detector.NORMAL_PRIORITY;
else if (option.equals("-high"))
priorityThreshold = Detector.HIGH_PRIORITY;
else if (option.equals("-sortByClass"))
bugReporterType = SORTING_REPORTER;
else if (option.equals("-xml")) {
bugReporterType = XML_REPORTER;
if (!optionExtraPart.equals("")) {
if (optionExtraPart.equals("withMessages"))
xmlWithMessages = true;
else
throw new IllegalArgumentException("Unknown option: -xml:" + optionExtraPart);
}
} else if (option.equals("-emacs")) {
bugReporterType = EMACS_REPORTER;
} else if (option.equals("-relaxed")) {
relaxedReportingMode = true;
} else if (option.equals("-train")) {
trainingOutputDir = !optionExtraPart.equals("") ? optionExtraPart : ".";
} else if (option.equals("-useTraining")) {
trainingInputDir = !optionExtraPart.equals("") ? optionExtraPart : ".";
} else if (option.equals("-html")) {
bugReporterType = HTML_REPORTER;
if (!optionExtraPart.equals("")) {
stylesheet = optionExtraPart;
} else {
stylesheet = "default.xsl";
}
} else if (option.equals("-xdocs"))
bugReporterType = XDOCS_REPORTER;
else if (option.equals("-quiet"))
quiet = true;
else if (option.equals("-exitcode"))
setExitCode = true;
else {
super.handleOption(option, optionExtraPart);
}
}
protected void handleOptionWithArgument(String option, String argument) throws IOException {
if (option.equals("-outputFile")) {
String outputFile = argument;
try {
outputStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outputFile)));
} catch (IOException e) {
System.err.println("Couldn't open " + outputFile + " for output: " + e.toString());
System.exit(1);
}
} else if (option.equals("-visitors") || option.equals("-omitVisitors")) {
boolean omit = option.equals("-omitVisitors");
if (!omit) {
// Selecting detectors explicitly, so start out by
// disabling all of them. The selected ones will
// be re-enabled.
userPreferences.enableAllDetectors(false);
}
// Explicitly enable or disable the selected detectors.
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String visitorName = tok.nextToken();
DetectorFactory factory = DetectorFactoryCollection.instance().getFactory(visitorName);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + visitorName);
userPreferences.enableDetector(factory, !omit);
}
} else if (option.equals("-chooseVisitors")) {
// This is like -visitors and -omitVisitors, but
// you can selectively enable and disable detectors,
// starting from the default set (or whatever set
// happens to be in effect).
choose(argument, "Detector choices", new Chooser() {
public void choose(boolean enabled, String what) {
DetectorFactory factory = DetectorFactoryCollection.instance()
.getFactory(what);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + what);
if (DEBUG) {
System.err.println("Detector " + factory.getShortName() + " " +
(enabled ? "enabled" : "disabled") +
", userPreferences="+System.identityHashCode(userPreferences));
}
userPreferences.enableDetector(factory, enabled);
}
});
} else if (option.equals("-choosePlugins")) {
// Selectively enable/disable plugins
choose(argument, "Plugin choices", new Chooser() {
public void choose(boolean enabled, String what) {
Plugin plugin = DetectorFactoryCollection.instance().getPluginById(what);
if (plugin == null)
throw new IllegalArgumentException("Unknown plugin: " + what);
plugin.setEnabled(enabled);
}
});
} else if (option.equals("-adjustPriority")) {
// Selectively raise or lower the priority of warnings
// produced by specified detectors.
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String token = tok.nextToken();
int eq = token.indexOf('=');
if (eq < 0)
throw new IllegalArgumentException("Illegal priority adjustment: " + token);
String visitorName = token.substring(0, eq);
DetectorFactory factory = DetectorFactoryCollection.instance()
.getFactory(visitorName);
if (factory == null)
throw new IllegalArgumentException("Unknown detector: " + visitorName);
String adjustment = token.substring(eq + 1);
if (!(adjustment.equals("raise") || adjustment.equals("lower")))
throw new IllegalArgumentException("Illegal priority adjustment value: " +
adjustment);
// Recall that lower values are higher priorities
factory.setPriorityAdjustment(adjustment.equals("raise") ? -1 : +1);
}
} else if (option.equals("-bugCategories")) {
this.bugCategorySet = handleBugCategories(userPreferences, argument);
} else if (option.equals("-onlyAnalyze")) {
				// The argument is a comma-separated list of classes and packages
				// to select to analyze. (If a list item ends with ".*",
				// it specifies a package; if it ends with ".-", it specifies a
				// package prefix; otherwise it's a class.)
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String item = tok.nextToken();
if (item.endsWith(".-"))
classScreener.addAllowedPrefix(item.substring(0, item.length() - 1));
else if (item.endsWith(".*"))
classScreener.addAllowedPackage(item.substring(0, item.length() - 1));
else
classScreener.addAllowedClass(item);
}
} else if (option.equals("-exclude") || option.equals("-include")) {
filterFile = argument;
include = option.equals("-include");
} else if (option.equals("-auxclasspath")) {
StringTokenizer tok = new StringTokenizer(argument, File.pathSeparator);
while (tok.hasMoreTokens())
project.addAuxClasspathEntry(tok.nextToken());
} else if (option.equals("-sourcepath")) {
StringTokenizer tok = new StringTokenizer(argument, File.pathSeparator);
while (tok.hasMoreTokens())
project.addSourceDir(new File(tok.nextToken()).getAbsolutePath());
} else {
super.handleOptionWithArgument(option, argument);
}
}
/**
* Common handling code for -chooseVisitors and -choosePlugins options.
*
* @param argument the list of visitors or plugins to be chosen
* @param desc String describing what is being chosen
* @param chooser callback object to selectively choose list members
*/
private void choose(String argument, String desc, Chooser chooser) {
StringTokenizer tok = new StringTokenizer(argument, ",");
while (tok.hasMoreTokens()) {
String what = tok.nextToken();
if (!what.startsWith("+") && !what.startsWith("-"))
throw new IllegalArgumentException(desc + " must start with " +
"\"+\" or \"-\" (saw " + what + ")");
boolean enabled = what.startsWith("+");
chooser.choose(enabled, what.substring(1));
}
}
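		/*
		 * Example argument for the format checked above (the detector names
		 * are hypothetical): "-chooseVisitors +FindFoo,-FindBar" enables
		 * FindFoo and disables FindBar, leaving every other detector at its
		 * default setting.
		 */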
public FindBugs createEngine() throws IOException, FilterException {
TextUIBugReporter textuiBugReporter = null;
switch (bugReporterType) {
case PRINTING_REPORTER:
textuiBugReporter = new PrintingBugReporter();
break;
case SORTING_REPORTER:
textuiBugReporter = new SortingBugReporter();
break;
case XML_REPORTER:
{
XMLBugReporter xmlBugReporter = new XMLBugReporter(project);
xmlBugReporter.setAddMessages(xmlWithMessages);
textuiBugReporter = xmlBugReporter;
}
break;
case EMACS_REPORTER:
textuiBugReporter = new EmacsBugReporter();
break;
case HTML_REPORTER:
textuiBugReporter = new HTMLBugReporter(project, stylesheet);
break;
case XDOCS_REPORTER:
textuiBugReporter = new XDocsBugReporter(project);
break;
default:
throw new IllegalStateException();
}
if (quiet)
textuiBugReporter.setErrorVerbosity(BugReporter.SILENT);
textuiBugReporter.setPriorityThreshold(priorityThreshold);
textuiBugReporter.setUseLongBugCodes(useLongBugCodes);
if (outputStream != null)
textuiBugReporter.setOutputStream(outputStream);
BugReporter bugReporter = textuiBugReporter;
if (bugCategorySet != null) {
bugReporter = new CategoryFilteringBugReporter(bugReporter, bugCategorySet);
}
FindBugs findBugs = new FindBugs(bugReporter, project);
findBugs.setUserPreferences(userPreferences);
if (filterFile != null)
findBugs.setFilter(filterFile, include);
findBugs.setClassScreener(classScreener);
findBugs.setRelaxedReportingMode(relaxedReportingMode);
if (trainingOutputDir != null) {
findBugs.enableTrainingOutput(trainingOutputDir);
}
if (trainingInputDir != null) {
findBugs.enableTrainingInput(trainingInputDir);
}
findBugs.setAnalysisFeatureSettings(settingList);
return findBugs;
}
}
/* ----------------------------------------------------------------------
* Member variables
* ---------------------------------------------------------------------- */
public static final boolean DEBUG = Boolean.getBoolean("findbugs.debug");
/**
* FindBugs home directory.
*/
private static String home;
/**
* File extensions that indicate an archive (zip, jar, or similar).
*/
static public final Set<String> archiveExtensionSet = new HashSet<String>();
static {
archiveExtensionSet.add(".jar");
archiveExtensionSet.add(".zip");
archiveExtensionSet.add(".war");
archiveExtensionSet.add(".ear");
archiveExtensionSet.add(".sar");
}
/**
* Known URL protocols.
* Filename URLs that do not have an explicit protocol are
* assumed to be files.
*/
static public final Set<String> knownURLProtocolSet = new HashSet<String>();
static {
knownURLProtocolSet.add("file");
knownURLProtocolSet.add("http");
knownURLProtocolSet.add("https");
knownURLProtocolSet.add("jar");
}
private ErrorCountingBugReporter bugReporter;
private boolean relaxedReportingMode;
private Project project;
private UserPreferences userPreferences;
private List<ClassObserver> classObserverList;
private ExecutionPlan executionPlan;
private FindBugsProgress progressCallback;
private ClassScreener classScreener;
private AnalysisContext analysisContext;
private String currentClass;
private Map<String,Long> detectorTimings;
private boolean useTrainingInput;
private boolean emitTrainingOutput;
private String trainingInputDir;
private String trainingOutputDir;
//private boolean runSlowFirstPassDetectors;
private AnalysisFeatureSetting[] settingList = DEFAULT_EFFORT;
private int passCount;
/* ----------------------------------------------------------------------
* Public methods
* ---------------------------------------------------------------------- */
/**
* Constructor.
*
* @param bugReporter the BugReporter object that will be used to report
* BugInstance objects, analysis errors, class to source mapping, etc.
* @param project the Project indicating which files to analyze and
* the auxiliary classpath to use; note that the FindBugs
* object will create a private copy of the Project object
*/
public FindBugs(BugReporter bugReporter, Project project) {
if (bugReporter == null)
throw new IllegalArgumentException("null bugReporter");
if (project == null)
throw new IllegalArgumentException("null project");
this.bugReporter = new ErrorCountingBugReporter(bugReporter);
this.relaxedReportingMode = false;
this.project = project.duplicate();
this.userPreferences = UserPreferences.createDefaultUserPreferences();
this.classObserverList = new LinkedList<ClassObserver>();
// Create a no-op progress callback.
this.progressCallback = new FindBugsProgress() {
public void reportNumberOfArchives(int numArchives) {
}
public void finishArchive() {
}
public void startAnalysis(int numClasses) {
}
public void finishClass() {
}
public void finishPerClassAnalysis() {
}
};
// Class screener
this.classScreener = new ClassScreener();
addClassObserver(bugReporter);
}
/**
* Set the progress callback that will be used to keep track
* of the progress of the analysis.
*
* @param progressCallback the progress callback
*/
public void setProgressCallback(FindBugsProgress progressCallback) {
this.progressCallback = progressCallback;
}
/**
* Set filter of bug instances to include or exclude.
*
* @param filterFileName the name of the filter file
* @param include true if the filter specifies bug instances to include,
* false if it specifies bug instances to exclude
*/
public void setFilter(String filterFileName, boolean include) throws IOException, FilterException {
Filter filter = new Filter(filterFileName);
BugReporter origBugReporter = bugReporter.getRealBugReporter();
BugReporter filterBugReporter = new FilterBugReporter(origBugReporter, (Matcher)filter, include);
bugReporter.setRealBugReporter(filterBugReporter);
}
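	/*
	 * Minimal usage sketch (the file names are placeholders): wrap the
	 * reporter before execute() runs, either dropping matched bug instances
	 * or keeping only the matched ones:
	 *
	 *     findBugs.setFilter("excludeFilter.xml", false);   // exclude matches
	 *     findBugs.setFilter("includeFilter.xml", true);    // or: keep only matches
	 */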
/**
* Set the UserPreferences representing which Detectors
* should be used. If UserPreferences are not set explicitly,
* the default set of Detectors will be used.
*
* @param userPreferences the UserPreferences
*/
public void setUserPreferences(UserPreferences userPreferences) {
this.userPreferences = userPreferences;
}
/**
* Add a ClassObserver.
*
* @param classObserver the ClassObserver
*/
public void addClassObserver(ClassObserver classObserver) {
classObserverList.add(classObserver);
}
/**
* Set the ClassScreener.
* This object chooses which individual classes to analyze.
* By default, all classes are analyzed.
*
* @param classScreener the ClassScreener to use
*/
public void setClassScreener(ClassScreener classScreener) {
this.classScreener = classScreener;
}
/**
* Set relaxed reporting mode.
*
* @param relaxedReportingMode true if relaxed reporting mode should be enabled,
* false if not
*/
public void setRelaxedReportingMode(boolean relaxedReportingMode) {
this.relaxedReportingMode = relaxedReportingMode;
}
/**
* Set whether or not training output should be emitted.
*
* @param trainingOutputDir directory to save training output in
*/
public void enableTrainingOutput(String trainingOutputDir) {
this.emitTrainingOutput = true;
this.trainingOutputDir = trainingOutputDir;
}
/**
* Set whether or not training input should be used to
* make the analysis more precise.
*
* @param trainingInputDir directory to load training input from
*/
public void enableTrainingInput(String trainingInputDir) {
this.useTrainingInput = true;
this.trainingInputDir = trainingInputDir;
}
/**
* Set analysis feature settings.
*
* @param settingList list of analysis feature settings
*/
public void setAnalysisFeatureSettings(AnalysisFeatureSetting[] settingList) {
if (settingList != null)
this.settingList = settingList;
}
/**
* Execute FindBugs on the Project.
* All bugs found are reported to the BugReporter object which was set
* when this object was constructed.
*
* @throws java.io.IOException if an I/O exception occurs analyzing one of the files
* @throws InterruptedException if the thread is interrupted while conducting the analysis
*/
public void execute() throws java.io.IOException, InterruptedException {
// Configure the analysis context
analysisContext = new AnalysisContext(bugReporter);
analysisContext.setSourcePath(project.getSourceDirList());
// Enable/disable relaxed reporting mode
FindBugsAnalysisFeatures.setRelaxedMode(relaxedReportingMode);
// Enable input/output of interprocedural property databases
if (emitTrainingOutput) {
if (!new File(trainingOutputDir).isDirectory())
throw new IOException("Training output directory " + trainingOutputDir + " does not exist");
AnalysisContext.currentAnalysisContext().setDatabaseOutputDir(trainingOutputDir);
// XXX: hack
System.setProperty("findbugs.checkreturn.savetraining", new File(trainingOutputDir, "checkReturn.db").getPath());
}
if (useTrainingInput) {
if (!new File(trainingInputDir).isDirectory())
throw new IOException("Training input directory " + trainingInputDir + " does not exist");
AnalysisContext.currentAnalysisContext().setDatabaseInputDir(trainingInputDir);
AnalysisContext.currentAnalysisContext().loadInterproceduralDatabases();
// XXX: hack
System.setProperty("findbugs.checkreturn.loadtraining", new File(trainingInputDir, "checkReturn.db").getPath());
}
configureAnalysisFeatures();
// Give the BugReporter a reference to this object,
// in case it wants to access information such
// as the AnalysisContext
bugReporter.setEngine(this);
// Create execution plan
try {
createExecutionPlan();
} catch (OrderingConstraintException e) {
IOException ioe = new IOException("Invalid detector ordering constraints");
ioe.initCause(e);
throw ioe;
}
// Clear the repository of classes
analysisContext.clearRepository();
		// Get the list of files to analyze.
		// Despite the name "file list", the entries can be jar/zip
		// archives, directories, or single class files.
LinkedList<ArchiveWorkListItem> archiveWorkList = new LinkedList<ArchiveWorkListItem>();
for (Iterator<String> i = project.getFileList().iterator(); i.hasNext(); ) {
String fileName = i.next();
archiveWorkList.add(new ArchiveWorkListItem(fileName, true));
}
// Report how many archives/directories/files will be analyzed,
// for progress dialog in GUI
progressCallback.reportNumberOfArchives(archiveWorkList.size());
// Keep track of the names of all classes to be analyzed
List<String> repositoryClassList = new LinkedList<String>();
// set the initial repository classpath.
setRepositoryClassPath();
// Record additional entries that should be added to
// the aux classpath. These occur when one or more classes
// in a directory or archive are skipped, to ensure that
// the skipped classes can still be referenced.
List<String> additionalAuxClasspathEntryList = new LinkedList<String>();
// Add all classes in analyzed archives/directories/files
while (!archiveWorkList.isEmpty()) {
ArchiveWorkListItem item = archiveWorkList.removeFirst();
scanArchiveOrDirectory(item, archiveWorkList, repositoryClassList,
additionalAuxClasspathEntryList);
}
// Add "extra" aux classpath entries needed to ensure that
// skipped classes can be referenced.
addCollectionToClasspath(additionalAuxClasspathEntryList);
// Examine all classes for bugs.
// Don't examine the same class more than once.
// (The user might specify two jar files that contain
// the same class.)
if (DEBUG)
detectorTimings = new HashMap<String,Long>();
// Execute each analysis pass in the execution plan
for (Iterator<AnalysisPass> i = executionPlan.passIterator(); i.hasNext();) {
AnalysisPass analysisPass = i.next();
analysisPass.createDetectors(bugReporter);
executeAnalysisPass(analysisPass, repositoryClassList);
// Clear the ClassContext cache.
// It may contain data that should be recomputed on the next pass.
analysisContext.clearClassContextCache();
}
// Flush any queued bug reports
bugReporter.finish();
// Flush any queued error reports
bugReporter.reportQueuedErrors();
// Free up memory for reports
analysisContext.clearRepository();
}
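	/*
	 * A minimal programmatic driver for this engine, assuming Project's
	 * no-argument constructor and an example archive name:
	 *
	 *     Project project = new Project();
	 *     project.addFile("myapp.jar");
	 *     BugReporter reporter = new PrintingBugReporter();
	 *     FindBugs engine = new FindBugs(reporter, project);
	 *     engine.execute();
	 *     System.out.println(engine.getBugCount() + " warnings");
	 */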
/**
* Get the analysis context.
* It is only valid to call this method after the execute()
* method has been called.
*/
public AnalysisContext getAnalysisContext() {
return analysisContext;
}
/**
* Get the name of the most recent class to be analyzed.
* This is useful for diagnosing an unexpected exception.
* Returns null if no class has been analyzed.
*/
public String getCurrentClass() {
return currentClass;
}
/**
* Get the number of bug instances that were reported during analysis.
*/
public int getBugCount() {
return bugReporter.getBugCount();
}
/**
* Get the number of errors that occurred during analysis.
*/
public int getErrorCount() {
return bugReporter.getErrorCount();
}
/**
	 * Get the number of times missing classes were reported during analysis.
*/
public int getMissingClassCount() {
return bugReporter.getMissingClassCount();
}
/**
* Set the FindBugs home directory.
*/
public static void setHome(String home) {
FindBugs.home = home;
}
/**
* Get the FindBugs home directory.
*/
public static String getHome() {
if (home == null) {
home = System.getProperty("findbugs.home");
if (home == null) {
System.err.println("Error: The findbugs.home property is not set!");
}
}
return home;
}
/* ----------------------------------------------------------------------
* Private methods
* ---------------------------------------------------------------------- */
/**
* Configure analysis features.
*/
private void configureAnalysisFeatures() {
for (AnalysisFeatureSetting setting : settingList) {
setting.configure(analysisContext);
}
}
/**
* Create the ExecutionPlan.
*
* @throws OrderingConstraintException
*/
private void createExecutionPlan() throws OrderingConstraintException {
executionPlan = new ExecutionPlan();
// Only enabled detectors should be part of the execution plan
executionPlan.setDetectorFactoryChooser(new DetectorFactoryChooser() {
public boolean choose(DetectorFactory factory) {
boolean enabled = isDetectorEnabled(factory);
// if (ExecutionPlan.DEBUG) {
// System.out.println(factory.getShortName() + ": enabled=" + enabled);
// }
return enabled;
}
});
// Add plugins
for (Iterator<Plugin> i = DetectorFactoryCollection.instance().pluginIterator(); i.hasNext();) {
Plugin plugin = i.next();
executionPlan.addPlugin(plugin);
}
// Build the plan
executionPlan.build();
}
/**
	 * Determine whether or not the given DetectorFactory should be enabled.
*
* @param factory the DetectorFactory
* @return true if the DetectorFactory should be enabled, false otherwise
*/
private boolean isDetectorEnabled(DetectorFactory factory) {
if (!factory.getPlugin().isEnabled())
return false;
if (!userPreferences.isDetectorEnabled(factory))
return false;
if (!factory.isEnabledForCurrentJRE())
return false;
// Slow first pass detectors are usually disabled, but may be explicitly enabled
if (!analysisContext.getBoolProperty(FindBugsAnalysisFeatures.INTERPROCEDURAL_ANALYSIS)
&& factory.isDetectorClassSubtypeOf(InterproceduralFirstPassDetector.class))
return false;
// Training detectors are enabled if, and only if, we are emitting training output
// XXX: grotesque hack for NoteCheckReturnValue. Need to fix for real.
boolean isTrainingDetector = factory.isDetectorClassSubtypeOf(TrainingDetector.class)
|| (emitTrainingOutput && factory.getFullName().equals("edu.umd.cs.findbugs.detect.NoteCheckReturnValue"));
if (isTrainingDetector != emitTrainingOutput)
return false;
return true;
}
/**
* Based on Project settings, set the classpath to be used
* by the Repository when looking up classes.
*/
private void setRepositoryClassPath() {
// Set aux classpath entries
addCollectionToClasspath(project.getAuxClasspathEntryList());
// Set implicit classpath entries
addCollectionToClasspath(project.getImplicitClasspathEntryList());
// Add system classpath entries
String systemClassPath = ClassPath.getClassPath();
StringTokenizer tok = new StringTokenizer(systemClassPath, File.pathSeparator);
while (tok.hasMoreTokens()) {
String entry = tok.nextToken();
try {
analysisContext.addClasspathEntry(entry);
} catch (IOException e) {
bugReporter.logError("Warning: could not add URL " +
entry + " to classpath", e);
}
}
}
/**
	 * Add all classpath entries in the given Collection to the classpath
	 * used by the analysis context when looking up classes.
	 * Missing entries are not fatal: we'll log them as analysis errors,
	 * but the analysis can continue.
	 *
	 * @param collection classpath entries to add
*/
private void addCollectionToClasspath(Collection<String> collection) {
for (Iterator<String> i = collection.iterator(); i.hasNext(); ) {
String entry = i.next();
try {
//repository.addURL(entry);
analysisContext.addClasspathEntry(entry);
} catch (IOException e) {
bugReporter.logError("Warning: could not add URL " +
entry + " to classpath", e);
}
}
}
/**
* Add all classes contained in given file or directory to the BCEL Repository.
*
* @param item work list item representing the file, which may be a jar/zip
* archive, a single class file, or a directory to be recursively
* searched for class files
* @param archiveWorkList work list of archives to analyze: this method
* may add to the work list if it finds nested archives
* @param repositoryClassList a List to which all classes found in
* the archive or directory are added, so we later know
* which files to analyze
*/
private void scanArchiveOrDirectory(ArchiveWorkListItem item,
LinkedList<ArchiveWorkListItem> archiveWorkList, List<String> repositoryClassList,
List<String> additionalAuxClasspathEntryList)
throws IOException, InterruptedException {
String fileName = item.getFileName();
ClassProducer classProducer = null;
try {
// Create a URL for the filename.
// The protocol defaults to "file" if not explicitly
// specified in the filename.
String protocol = URLClassPath.getURLProtocol(fileName);
if (protocol == null) {
protocol = "file";
fileName = "file:" + fileName;
}
URL url = new URL(fileName);
// Figure out the file extension
String fileExtension = null;
int lastDot = fileName.lastIndexOf('.');
if (lastDot >= 0) {
fileExtension = fileName.substring(lastDot);
}
// Create the ClassProducer
if (fileExtension != null && URLClassPath.isArchiveExtension(fileExtension))
classProducer = new ZipClassProducer(url, archiveWorkList, additionalAuxClasspathEntryList);
else if (fileExtension != null && fileExtension.equals(".class"))
classProducer = new SingleClassProducer(url);
else if (protocol.equals("file")) {
// Assume it's a directory
fileName = fileName.substring("file:".length());
File dir = new File(fileName);
if (!dir.isDirectory())
throw new IOException("Path " + fileName + " is not an archive, class file, or directory");
classProducer = new DirectoryClassProducer(fileName, additionalAuxClasspathEntryList);
} else
throw new IOException("URL " + fileName + " is not an archive, class file, or directory");
// Load all referenced classes into the Repository
for (; ;) {
if (Thread.interrupted())
throw new InterruptedException();
try {
JavaClass jclass = classProducer.getNextClass();
if (jclass == null)
break;
if (DEBUG) System.out.println("Scanned " + jclass.getClassName());
analysisContext.addApplicationClassToRepository(jclass);
repositoryClassList.add(jclass.getClassName());
} catch (ClassFormatException e) {
if (DEBUG) e.printStackTrace();
bugReporter.logError("Invalid classfile format", e);
}
}
if (item.isExplicit())
progressCallback.finishArchive();
// If the archive or directory scanned contained source files,
// add it to the end of the source path.
if (classProducer.containsSourceFiles())
project.addSourceDir(fileName);
} catch (IOException e) {
// You'd think that the message for a FileNotFoundException would include
// the filename, but you'd be wrong. So, we'll add it explicitly.
throw new IOException("Could not analyze " + fileName + ": " + e.getMessage());
} finally {
if (classProducer != null) {
classProducer.close();
}
}
}
/**
* Execute a single AnalysisPass.
*
* @param analysisPass the AnalysisPass
* @param repositoryClassList list of application classes in the repository
* @throws InterruptedException
*/
private void executeAnalysisPass(AnalysisPass analysisPass, List<String> repositoryClassList) throws InterruptedException {
// Callback for progress dialog: analysis is starting
progressCallback.startAnalysis(repositoryClassList.size());
if (ExecutionPlan.DEBUG) {
System.out.println("************* Analysis pass " + (passCount++) + " *************");
for (Iterator<DetectorFactory> i = analysisPass.iterator(); i.hasNext();) {
DetectorFactory factory = i.next();
System.out.println("\t" + factory.getFullName());
}
}
// Examine each class in the application
Set<String> examinedClassSet = new HashSet<String>();
for (Iterator<String> i = repositoryClassList.iterator(); i.hasNext();) {
String className = i.next();
if (examinedClassSet.add(className))
examineClass(analysisPass, className);
}
if (DEBUG) {
long total = 0;
Iterator<Long> timingsIt = detectorTimings.values().iterator();
while (timingsIt.hasNext()) {
total += timingsIt.next().longValue();
}
System.out.println();
System.out.println("Detector Timings");
Iterator<Map.Entry<String,Long>> it = detectorTimings.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String,Long> entry = it.next();
String detectorName = entry.getKey();
long detectorTime = entry.getValue().longValue();
System.out.println(detectorName + ": " + detectorTime + " ms -> (" + (detectorTime * 100.0f / (float)total) + ") %");
}
System.out.println();
detectorTimings = null;
}
// Callback for progress dialog: analysis finished
progressCallback.finishPerClassAnalysis();
// Force any detectors which defer work until all classes have
// been seen to do that work.
this.reportFinal(analysisPass.getDetectorList());
}
/**
* Examine a single class by invoking all of the Detectors on it.
*
* @param analysisPass the AnalysisPass whose detectors are run on the class
* @param className    the fully qualified name of the class to examine
*/
private void examineClass(AnalysisPass analysisPass, String className) throws InterruptedException {
if (DEBUG) System.out.println("Examining class " + className);
this.currentClass = className;
Detector[] detectors = analysisPass.getDetectorList();
try {
JavaClass javaClass = Repository.lookupClass(className);
// Notify ClassObservers
for (Iterator<ClassObserver> i = classObserverList.iterator(); i.hasNext();) {
i.next().observeClass(javaClass);
}
// Create a ClassContext for the class
ClassContext classContext = analysisContext.getClassContext(javaClass);
// Run the Detectors
for (int i = 0; i < detectors.length; ++i) {
if (Thread.interrupted())
throw new InterruptedException();
Detector detector = detectors[i];
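// Detectors implementing StatelessDetector are cloned here so that each
// class is visited by a fresh detector instance.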
if (detector instanceof StatelessDetector) {
try {
detector = (Detector)((StatelessDetector)detector).clone();
} catch (CloneNotSupportedException cnfe) {
detector = detectors[i]; // this shouldn't happen
}
}
try {
long start = 0, end;
if (DEBUG) {
System.out.println(" running " + detector.getClass().getName());
start = System.currentTimeMillis();
}
detector.visitClassContext(classContext);
if (DEBUG) {
end = System.currentTimeMillis();
long delta = end - start;
String detectorName = detector.getClass().getName();
Long total = detectorTimings.get(detectorName);
if (total == null)
total = new Long(delta);
else
total = new Long(total.longValue() + delta);
detectorTimings.put(detectorName, total);
}
} catch (AnalysisException e) {
reportRecoverableDetectorException(className, detector, e);
} catch (ArrayIndexOutOfBoundsException e) {
reportRecoverableDetectorException(className, detector, e);
} catch (ClassCastException e) {
reportRecoverableDetectorException(className, detector, e);
}
}
} catch (ClassNotFoundException e) {
// This should never happen unless there are bugs in BCEL.
bugReporter.reportMissingClass(e);
reportRecoverableException(className, e);
} catch (ClassFormatException e) {
reportRecoverableException(className, e);
}
catch (RuntimeException re) {
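// Rebuild the same exception type with a message that names the class being
// analyzed, so the failure is easier to localize; if that exception type has
// no (String) constructor, fall back to rethrowing the original unchanged.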
RuntimeException annotatedEx;
try {
String sep = System.getProperty("line.separator");
Constructor<? extends RuntimeException> c = re.getClass().getConstructor(new Class[] { String.class });
String msg = re.getMessage();
msg = sep + "While finding bugs in class: " + className + ((msg == null) ? "" : (sep + msg));
annotatedEx = c.newInstance(new Object[] {msg});
annotatedEx.setStackTrace(re.getStackTrace());
} catch (Exception e) {
throw re;
}
throw annotatedEx;
}
progressCallback.finishClass();
}
private void reportRecoverableException(String className, Exception e) {
if (DEBUG) {
e.printStackTrace();
}
bugReporter.logError("Exception analyzing " + className, e);
}
private void reportRecoverableDetectorException(String className, Detector detector, Exception e) {
if (DEBUG) {
e.printStackTrace();
}
bugReporter.logError("Exception analyzing " + className +
" using detector " + detector.getClass().getName(), e);
}
/**
* Call report() on all detectors, to give them a chance to
* report any accumulated bug reports.
*/
private void reportFinal(Detector[] detectors) throws InterruptedException {
for (int i = 0; i < detectors.length; ++i) {
if (Thread.interrupted())
throw new InterruptedException();
detectors[i].report();
}
}
/**
* Parse the data for a class to create a JavaClass object.
*/
private static JavaClass parseClass(String archiveName, InputStream in, String fileName)
throws IOException {
if (DEBUG) System.out.println("About to parse " + fileName + " in " + archiveName);
return parseFromStream(in, fileName);
}
/**
* Parse the data for a class to create a JavaClass object.
*/
private static JavaClass parseClass(URL url) throws IOException {
if (DEBUG) System.out.println("About to parse " + url.toString());
InputStream in = url.openStream();
return parseFromStream(in, url.toString());
}
/**
* Parse an input stream to produce a JavaClass object.
* Makes sure that the input stream is closed no
* matter what.
*/
private static JavaClass parseFromStream(InputStream in, String fileName) throws IOException {
boolean parsed = false;
try {
JavaClass jclass = new ClassParser(in, fileName).parse();
parsed = true;
return jclass;
} finally {
if (!parsed) {
// BCEL does not close the input stream unless
// parsing was successful.
try {
in.close();
} catch (IOException ignore) {
// Ignore
}
}
}
}
/**
* Process -bugCategories option.
*
* @param userPreferences UserPreferences representing which Detectors are enabled
* @param categories comma-separated list of bug categories
* @return Set of categories to be used
*/
private static Set<String> handleBugCategories(UserPreferences userPreferences, String categories) {
// Parse list of bug categories
Set<String> categorySet = new HashSet<String>();
StringTokenizer tok = new StringTokenizer(categories, ",");
while (tok.hasMoreTokens()) {
categorySet.add(tok.nextToken());
}
// // Enable only those detectors that can emit those categories
// // (and the ones that produce unknown bug patterns, just to be safe).
// // Skip disabled detectors, though.
// for (Iterator<DetectorFactory> i = DetectorFactoryCollection.instance().factoryIterator(); i.hasNext();) {
// DetectorFactory factory = i.next();
// if (!factory.isEnabledForCurrentJRE())
// continue;
// Collection<BugPattern> reported = factory.getReportedBugPatterns();
// boolean enable = false;
// if (reported.isEmpty()) {
// // Don't know what bug patterns are produced by this detector
// if (DEBUG) System.out.println("Unknown bug patterns for " + factory.getShortName());
// enable = true;
// } else {
// for (Iterator<BugPattern> j = reported.iterator(); j.hasNext();) {
// BugPattern bugPattern = j.next();
// if (categorySet.contains(bugPattern.getCategory())) {
// if (DEBUG)
// System.out.println("MATCH ==> " + categorySet +
// " -- " + bugPattern.getCategory());
// enable = true;
// break;
// }
// }
// }
// if (DEBUG && enable) {
// System.out.println("Enabling " + factory.getShortName());
// }
// userPreferences.enableDetector(factory, enable);
// }
return categorySet;
}
/* ----------------------------------------------------------------------
* main() method
* ---------------------------------------------------------------------- */
public static void main(String[] argv) {
try {
TextUICommandLine commandLine = new TextUICommandLine();
FindBugs findBugs = createEngine(commandLine, argv);
try {
runMain(findBugs, commandLine);
} catch (RuntimeException e) {
System.err.println("Fatal exception: " + e.toString());
String currentClass = findBugs.getCurrentClass();
if (currentClass != null) {
System.err.println("\tWhile analyzing " + currentClass);
}
e.printStackTrace();
System.err.println("Please report the failure to " + Version.SUPPORT_EMAIL);
System.exit(1);
}
} catch (java.io.IOException e) {
// Probably a missing file
if (DEBUG) {
e.printStackTrace();
}
System.err.println("IO Error: " + e.getMessage());
System.exit(1);
} catch (FilterException e) {
System.err.println("Filter exception: " + e.getMessage());
} catch (IllegalArgumentException e) {
// Probably an illegal command line argument
System.err.println("Illegal argument: " + e.getMessage());
System.exit(1);
}
}
private static FindBugs createEngine(TextUICommandLine commandLine, String[] argv)
throws java.io.IOException, FilterException {
// Expand option files in command line.
// An argument beginning with "@" is treated as specifying
// the name of an option file.
// Each line of an option file is treated as a single argument.
// Blank lines and comment lines (beginning with "#")
// are ignored.
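// For illustration only (the file name and options below are made up), an
// argument of "@findbugs.opts" could name a file containing:
//
//     # report low-priority warnings as XML
//     -low
//     -xml
//     myapp.jar
//
// After comment and blank lines are dropped, the remaining three lines are
// spliced into argv as three separate arguments.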
argv = CommandLine.expandOptionFiles(argv, true, true);
int argCount = 0;
try {
argCount = commandLine.parse(argv);
} catch (IllegalArgumentException e) {
System.out.println(e.getMessage());
showHelp(commandLine);
}
Project project = commandLine.getProject();
for (int i = argCount; i < argv.length; ++i)
project.addFile(argv[i]);
if (project.getFileCount() == 0) {
showHelp(commandLine);
}
return commandLine.createEngine();
}
private static void showHelp(TextUICommandLine commandLine) {
showSynopsis();
ShowHelp.showGeneralOptions();
FindBugs.showCommandLineOptions(commandLine);
System.exit(1);
}
private static void runMain(FindBugs findBugs, TextUICommandLine commandLine)
throws java.io.IOException, RuntimeException, FilterException {
try {
findBugs.execute();
} catch (InterruptedException e) {
// Not possible when running from the command line
}
int bugCount = findBugs.getBugCount();
int missingClassCount = findBugs.getMissingClassCount();
int errorCount = findBugs.getErrorCount();
if (!commandLine.quiet() || commandLine.setExitCode()) {
if (bugCount > 0)
System.err.println("Warnings generated: " + bugCount);
if (missingClassCount > 0)
System.err.println("Missing classes: " + missingClassCount);
if (errorCount > 0)
System.err.println("Analysis errors: " + errorCount);
}
if (commandLine.setExitCode()) {
int exitCode = 0;
if (errorCount > 0)
exitCode |= ERROR_FLAG;
if (missingClassCount > 0)
exitCode |= MISSING_CLASS_FLAG;
if (bugCount > 0)
exitCode |= BUGS_FOUND_FLAG;
System.exit(exitCode);
}
}
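/**
 * Illustrative sketch, not part of the original source: shows how a caller
 * could decode the combined exit code produced in runMain() above. The flag
 * constants are the same ones OR'ed together there.
 */
private static String describeExitCode(int exitCode) {
    StringBuilder buf = new StringBuilder();
    if ((exitCode & ERROR_FLAG) != 0)
        buf.append("analysis errors; ");
    if ((exitCode & MISSING_CLASS_FLAG) != 0)
        buf.append("missing classes; ");
    if ((exitCode & BUGS_FOUND_FLAG) != 0)
        buf.append("bugs found; ");
    return buf.length() == 0 ? "clean run" : buf.toString();
}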
/**
* Print command line options synopses to stdout.
*/
public static void showCommandLineOptions() {
showCommandLineOptions(new TextUICommandLine());
}
public static void showCommandLineOptions(TextUICommandLine commandLine) {
System.out.println("Command line options:");
commandLine.printUsage(System.out);
}
public static void showSynopsis() {
System.out.println("Usage: findbugs [general options] -textui [command line options...] [jar/zip/class files, directories...]");
}
}
// vim:ts=4
| detectorTimings can't be nulled out between passes, just reallocate a new one.
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@4525 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
| findbugs/src/java/edu/umd/cs/findbugs/FindBugs.java | detectorTimings can't be nulled out between passes, just reallocate a new one. | <ide><path>indbugs/src/java/edu/umd/cs/findbugs/FindBugs.java
<ide> System.out.println(detectorName + ": " + detectorTime + " ms -> (" + (detectorTime * 100.0f / (float)total) + ") %");
<ide> }
<ide> System.out.println();
<del> detectorTimings = null;
<add> detectorTimings = new HashMap<String,Long>();
<ide> }
<ide>
<ide> // Callback for progress dialog: analysis finished |
|
Java | agpl-3.0 | e6af6cae8fe5dae88be0d4c64857b518d3222288 | 0 | qcri-social/AIDR,qcri-social/AIDR,qcri-social/AIDR,qcri-social/AIDR | package qa.qcri.aidr.manager.service.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.glassfish.jersey.jackson.JacksonFeature;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import qa.qcri.aidr.common.code.JacksonWrapper;
import qa.qcri.aidr.dbmanager.dto.CollectionDTO;
import qa.qcri.aidr.dbmanager.dto.NominalAttributeDTO;
import qa.qcri.aidr.dbmanager.dto.NominalLabelDTO;
import qa.qcri.aidr.dbmanager.dto.UsersDTO;
import qa.qcri.aidr.dbmanager.dto.taggerapi.TrainingDataDTO;
import qa.qcri.aidr.manager.dto.ModelHistoryWrapper;
import qa.qcri.aidr.manager.dto.PingResponse;
import qa.qcri.aidr.manager.dto.TaggerAllCrisesResponse;
import qa.qcri.aidr.manager.dto.TaggerAllCrisesTypesResponse;
import qa.qcri.aidr.manager.dto.TaggerAttribute;
import qa.qcri.aidr.manager.dto.TaggerCrisesAttribute;
import qa.qcri.aidr.manager.dto.TaggerCrisis;
import qa.qcri.aidr.manager.dto.TaggerCrisisAttributesResponse;
import qa.qcri.aidr.manager.dto.TaggerCrisisExist;
import qa.qcri.aidr.manager.dto.TaggerCrisisModelsResponse;
import qa.qcri.aidr.manager.dto.TaggerCrisisRequest;
import qa.qcri.aidr.manager.dto.TaggerCrisisType;
import qa.qcri.aidr.manager.dto.TaggerLabel;
import qa.qcri.aidr.manager.dto.TaggerLabelRequest;
import qa.qcri.aidr.manager.dto.TaggerModel;
import qa.qcri.aidr.manager.dto.TaggerModelFamily;
import qa.qcri.aidr.manager.dto.TaggerModelLabelsResponse;
import qa.qcri.aidr.manager.dto.TaggerModelNominalLabel;
import qa.qcri.aidr.manager.dto.TaggerResponseWrapper;
import qa.qcri.aidr.manager.dto.TaggerStatusResponse;
import qa.qcri.aidr.manager.dto.TaggerUser;
import qa.qcri.aidr.manager.dto.TaggersForCodes;
import qa.qcri.aidr.manager.dto.TaggersForCollectionsRequest;
import qa.qcri.aidr.manager.dto.TaggersForCollectionsResponse;
import qa.qcri.aidr.manager.dto.TaskAnswer;
import qa.qcri.aidr.manager.dto.TrainingDataRequest;
import qa.qcri.aidr.manager.exception.AidrException;
import qa.qcri.aidr.manager.persistence.entities.Collection;
import qa.qcri.aidr.manager.service.TaggerService;
import qa.qcri.aidr.manager.service.UserService;
import qa.qcri.aidr.manager.util.ManagerConfigurationProperty;
import qa.qcri.aidr.manager.util.ManagerConfigurator;
//import com.sun.jersey.api.client.Client;
//import com.sun.jersey.api.client.ClientResponse;
//import com.sun.jersey.api.client.WebResource;
@Service("taggerService")
public class TaggerServiceImpl implements TaggerService {
private Logger logger = Logger.getLogger(TaggerServiceImpl.class);
// @Autowired
// private Client client;
private static String taggerMainUrl;
private static String crowdsourcingAPIMainUrl;
private static String persisterMainUrl;
private static String outputAPIMainUrl;
@Autowired
private UserService userService;
TaggerServiceImpl() {
taggerMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.TAGGER_MAIN_URL);
crowdsourcingAPIMainUrl = ManagerConfigurator.getInstance()
.getProperty(ManagerConfigurationProperty.CROWDSOURCING_API_MAIN_URL);
persisterMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.PERSISTER_MAIN_URL);
outputAPIMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.OUTPUT_MAIN_URL);
}
// new DTOs introduced. -Imran
@Override
public List<TaggerCrisisType> getAllCrisisTypes() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received request to fetch all crisisTypes");
WebTarget webResource = client.target(taggerMainUrl
+ "/crisisType/all");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.error("URL: " + taggerMainUrl + " " + jsonResponse);
TaggerAllCrisesTypesResponse crisesTypesResponse = objectMapper
.readValue(jsonResponse, TaggerAllCrisesTypesResponse.class);
if (crisesTypesResponse.getCrisisTypes() != null) {
logger.info("Tagger returned "
+ crisesTypesResponse.getCrisisTypes().size()
+ " crises types");
}
return crisesTypesResponse.getCrisisTypes();
} catch (Exception e) {
logger.error("Error while getting all crisis from Tagger", e);
throw new AidrException(
"Error while getting all crisis from Tagger", e);
}
}
@Override
public List<TaggerCrisis> getCrisesByUserId(Integer userId)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis?userID=" + userId);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerAllCrisesResponse taggerAllCrisesResponse = objectMapper
.readValue(jsonResponse, TaggerAllCrisesResponse.class);
if (taggerAllCrisesResponse.getCrisises() != null) {
logger.info("Tagger returned "
+ taggerAllCrisesResponse.getCrisises().size()
+ " crisis for user");
}
return taggerAllCrisesResponse.getCrisises();
} catch (Exception e) {
logger.error("Exception while fetching crisis by userId: "+userId, e);
throw new AidrException(
"No collection is enabled for Tagger. Please enable tagger for one of your collections.",
e);
}
}
@Override
public String createNewCrises(TaggerCrisisRequest crisis)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
CollectionDTO dto = crisis.toDTO();
logger.info("Going to create new crisis: " + dto.getCode());
WebTarget webResource = client.target(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(dto),
Response.class);
return clientResponse.readEntity(String.class);
} catch (Exception e) {
logger.error("Error while creating new crisis: "+crisis.getName(), e);
throw new AidrException(
"Error while creating new crises in Tagger", e);
}
}
// (6)
@Override
public java.util.Collection<TaggerAttribute> getAttributesForCrises(Integer crisisID,
Integer userId) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/attribute/crisis/all?exceptCrisis=" + crisisID);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisAttributesResponse crisisAttributesResponse = objectMapper
.readValue(jsonResponse,
TaggerCrisisAttributesResponse.class);
if (crisisAttributesResponse.getCrisisAttributes() != null) {
logger.info("Tagger returned "
+ crisisAttributesResponse.getCrisisAttributes().size()
+ " attributes available for crises with ID "
+ crisisID);
} else {
return Collections.emptyList();
}
return convertTaggerCrisesAttributeToDTO(
crisisAttributesResponse.getCrisisAttributes(), userId);
} catch (Exception e) {
logger.error("Error while getting all attributes for crisis from Tagger for crisisID: "+crisisID, e);
throw new AidrException(
"Error while getting all attributes for crisis from Tagger",
e);
}
}
@Override
public Map<String, Integer> countCollectionsClassifiers(
List<String> collectionCodes) throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/crises");
String input = "";
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
input = objectMapper.writeValueAsString(collectionCodes);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(input));
String jsonResponse = clientResponse.readEntity(String.class);
HashMap<String, Integer> rv = objectMapper.readValue(jsonResponse,
HashMap.class);
return rv;
} catch (Exception e) {
logger.error("Error while getting amount of classifiers by collection codes in Tagger", e);
throw new AidrException(
"Error while getting amount of classifiers by collection codes in Tagger",
e);
}
}
@Override
public TaggerCrisisExist isCrisesExist(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisExist crisisExist = objectMapper.readValue(
jsonResponse, TaggerCrisisExist.class);
if (crisisExist.getCrisisId() != null) {
logger.info("Response from Tagger-API for Crises with the code "
+ code+ ", found crisisID = "+ crisisExist.getCrisisId());
return crisisExist;
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while checking if crisis exist in Tagger for collection: "+code, e);
throw new AidrException(
"Error while checking if crisis exist in Tagger", e);
}
}
@Override
public Integer isUserExistsByUsername(String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/user/"
+ userName);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
UsersDTO user = objectMapper
.readValue(jsonResponse, UsersDTO.class);
TaggerUser taggerUser = new TaggerUser(user);
if (taggerUser != null && taggerUser.getUserID() != null) {
logger.info("User with the user name " + userName
+ " already exist in Tagger and has ID: "
+ taggerUser.getUserID());
return taggerUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while checking if user: "+userName +"exist", e);
throw new AidrException(
"Error while checking if user exist in Tagger", e);
}
}
@Override
public Integer addNewUser(TaggerUser taggerUser) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/user");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(taggerUser
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
UsersDTO dto = objectMapper.readValue(jsonResponse, UsersDTO.class);
if (dto != null) {
TaggerUser createdUser = new TaggerUser(dto);
if (createdUser != null && createdUser.getUserID() != null) {
logger.info("User with ID " + createdUser.getUserID()
+ " was created in Tagger");
return createdUser.getUserID();
} else {
return null;
}
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while adding new user to Tagger", e);
throw new AidrException("Error while adding new user to Tagger", e);
}
}
@Override
public Integer addAttributeToCrisis(TaggerModelFamily modelFamily)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received add Attirbute request for crisis = "
+ modelFamily.getCrisis().getCrisisID() + ", attribute = "
+ modelFamily.getNominalAttribute().getNominalAttributeID());
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(modelFamily
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper responseWrapper = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
if (responseWrapper != null) {
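// A positive entityID is taken to be the ID of the newly created model
// family; any other value is treated as a failure, in which case the status
// code and message returned by tagger-api are logged instead.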
Long modelFamilyIDLong = responseWrapper.getEntityID();
Integer modelFamilyID = new Long(modelFamilyIDLong).intValue();
if (modelFamilyID.intValue() > 0) {
logger.info("Attribute was added to crises: "
+ modelFamilyID);
return modelFamilyID;
} else {
logger.info("Attribute was NOT added to crises: ");
logger.info("Received message from tagger-api: "
+ responseWrapper.getStatusCode() + "\n"
+ responseWrapper.getMessage());
return null;
}
} else {
logger.info("Attribute was NOT added to crises: ");
return null;
}
} catch (Exception e) {
logger.error("Error while adding attribute to crises", e);
throw new AidrException("Error while adding attribute to crises", e);
}
}
@Override
public TaggerCrisis getCrisesByCode(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received request for crisis : " + code);
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/by-code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
logger.info("received response: " + jsonResponse);
CollectionDTO dto = null;
TaggerResponseWrapper response = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
if (response.getDataObject() != null) {
dto = objectMapper.readValue(objectMapper
.writeValueAsString(response.getDataObject()),
CollectionDTO.class);
}
logger.info("deserialization result: " + dto);
if (dto != null) {
TaggerCrisis crisis = new TaggerCrisis(dto);
if (crisis != null) {
logger.info("Tagger returned crisis with code"
+ crisis.getCode());
}
return crisis;
}
return null;
} catch (Exception e) {
logger.error("Error while getting crisis by code for collection: "+code, e);
return null;
// throw new
// AidrException("Error while getting crisis by code from Tagger",
// e);
}
}
@Override
public TaggerCrisis updateCode(TaggerCrisis crisis) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
CollectionDTO dto = crisis.toDTO();
WebTarget webResource = client.target(taggerMainUrl + "/crisis");
logger.info("Received update request for crisis = "
+ crisis.getCode() + ", dto = " + dto.getCode());
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).put(
Entity.json(objectMapper.writeValueAsString(dto)),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
CollectionDTO updatedDTO = objectMapper.readValue(jsonResponse,
CollectionDTO.class);
TaggerCrisis updatedCrisis = new TaggerCrisis(updatedDTO);
logger.info("Received response: " + updatedCrisis.getCode() + ", "
+ updatedCrisis.getName() + ","
+ updatedCrisis.getCrisisType().getCrisisTypeID());
if (updatedCrisis != null) {
logger.info("Crisis with id " + updatedCrisis.getCrisisID()
+ " was updated in Tagger");
}
return crisis;
} catch (Exception e) {
logger.error("Error while updating crisis: "+crisis.getCode(), e);
throw new AidrException(
"Error while updating crisis", e);
}
}
@Override
public List<TaggerModel> getModelsForCrisis(Integer crisisID)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
int retrainingThreshold = getCurrentRetrainingThreshold();
WebTarget webResource = client.target(taggerMainUrl
+ "/model/crisis/" + new Long(crisisID));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisModelsResponse crisisModelsResponse = objectMapper
.readValue(jsonResponse, TaggerCrisisModelsResponse.class);
logger.info("Tagger returned jsonResponse: " + jsonResponse);
if (crisisModelsResponse.getModelWrapper() != null) {
logger.info("Tagger returned "
+ crisisModelsResponse.getModelWrapper().size()
+ " models for crises with ID " + crisisID);
List<TaggerModel> tempTaggerModel = new ArrayList<TaggerModel>();
for (TaggerModel temp : crisisModelsResponse.getModelWrapper()) {
TaggerModel tm = new TaggerModel();
// System.out.println("reset0 : " + retrainingThreshold);
tm.setRetrainingThreshold(retrainingThreshold);
tm.setAttributeID(temp.getAttributeID());
tm.setModelID(temp.getModelID());
tm.setAttribute(temp.getAttribute());
tm.setAuc(temp.getAuc());
tm.setStatus(temp.getStatus());
tm.setTrainingExamples(temp.getTrainingExamples());
tm.setClassifiedDocuments(temp.getClassifiedDocuments());
tm.setModelFamilyID(temp.getModelFamilyID());
// System.out.println("reset : " +
// tm.getRetrainingThreshold());
tempTaggerModel.add(tm);
}
return tempTaggerModel;
}
return null;
} catch (Exception e) {
logger.error("Error while getting all models for crisisId:"+crisisID, e);
throw new AidrException(
"Error while getting all models for crisis from Tagger", e);
}
}
// (1)
@Override
public TaggerAttribute createNewAttribute(TaggerAttribute attribute)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(attribute
.toDTO())), Response.class);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute newAttribute = dto != null ? new TaggerAttribute(
dto) : null;
if (newAttribute != null) {
logger.info("Attribute with ID "
+ newAttribute.getNominalAttributeID()
+ " was created in Tagger");
}
return newAttribute;
} catch (Exception e) {
logger.error("Error while creating new attribute", e);
throw new AidrException(
"Error while creating new attribute in Tagger", e);
}
}
// (4)
@Override
public TaggerAttribute getAttributeInfo(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
logger.info("received request for nominal attribute id: " + id);
WebTarget webResource = client.target(taggerMainUrl + "/attribute/"
+ new Long(id));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute response = dto != null ? new TaggerAttribute(dto)
: null;
logger.info("Received response: " + response != null ? response
.getName() : "no attribute for id = " + id);
if (response != null) {
logger.info("Attribute with ID "
+ response.getNominalAttributeID()
+ " was retrieved from Tagger");
}
return response;
} catch (Exception e) {
logger.error("Error while getting attribute info for attribute: "+id, e);
throw new AidrException(
"Error while getting attribute from Tagger", e);
}
}
@Override
public TaggerLabel getLabelInfo(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl + "/label/"
+ id);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
NominalLabelDTO.class);
TaggerLabel response = new TaggerLabel(dto);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID()
+ " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
logger.error("Error while getting label info for labelId: "+id, e);
throw new AidrException("Error while getting label from Tagger", e);
}
}
// (3)
@Override
public boolean deleteAttribute(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute/"
+ id);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Attribute with ID " + id
+ " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while deleting a attribute for attributeId: "+id, e);
throw new AidrException("Error while deleting attribute in Tagger",
e);
}
}
@Override
public boolean deleteTrainingExample(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl
+ "/document/removeTrainingExample/" + new Long(id));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Document with ID " + id
+ " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while deleting document", e);
throw new AidrException("Error while deleting document in Tagger",
e);
}
}
@Override
public boolean removeAttributeFromCrises(Integer modelFamilyID)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
deletePybossaApp(modelFamilyID);
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/" + new Long(modelFamilyID));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Classifier was remove from crises by modelFamilyID: "
+ modelFamilyID);
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while removing classifier from crisis ", e);
throw new AidrException(
"Error while removing classifier from crisis in Tagger", e);
}
}
// (2)
@Override
public TaggerAttribute updateAttribute(TaggerAttribute attribute)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).put(
Entity.json(objectMapper.writeValueAsString(attribute
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute updatedAttribute = new TaggerAttribute(dto);
if (updatedAttribute != null) {
logger.info("Attribute with id "
+ updatedAttribute.getNominalAttributeID()
+ " was updated in Tagger");
} else {
return null;
}
return attribute;
} catch (Exception e) {
logger.error("Error while updating attribute: "+attribute.getCode(), e);
throw new AidrException("Error while updating attribute in Tagger",
e);
}
}
@Override
public TaggerLabel updateLabel(TaggerLabelRequest label)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
// WebResource webResource = client.resource(taggerMainUrl +
// "/label");
WebTarget webResource = client.target(taggerMainUrl + "/label");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON)
.put(Entity.json(objectMapper.writeValueAsString(label
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
NominalLabelDTO.class);
TaggerLabel updatedLabel = new TaggerLabel(dto);
if (updatedLabel != null) {
logger.info("Label with id " + updatedLabel.getNominalLabelID()
+ " was updated in Tagger");
} else {
return null;
}
return updatedLabel;
} catch (Exception e) {
logger.error("Error while updating label: "+label.getName(), e);
throw new AidrException("Error while updating label in Tagger", e);
}
}
@Override
public TaggerLabel createNewLabel(TaggerLabelRequest label)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/label");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON)
.post(Entity.json(objectMapper.writeValueAsString(label
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
NominalLabelDTO.class);
TaggerLabel response = new TaggerLabel(dto);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID()
+ " was created in Tagger");
return response;
} else {
logger.error("Error while creating new label: "+label.getName());
throw new AidrException(
"Error while creating new label in Tagger");
}
} catch (Exception e) {
logger.error("Error while creating new label: "+label.getName(), e);
throw new AidrException("Error while creating new label in Tagger",
e);
}
}
// (7)
@Override
public TaggerAttribute attributeExists(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/attribute/code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerAttribute attribute = objectMapper.readValue(jsonResponse,
TaggerAttribute.class);
if (attribute != null) {
logger.info("Attribute with the code " + code
+ " already exist in Tagger.");
return attribute;
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while checking if attribute: "+code+" exist", e);
throw new AidrException(
"Error while checking if attribute exist in Tagger", e);
}
}
@Override
public List<TrainingDataDTO> getTrainingDataByModelIdAndCrisisId(
Integer modelFamilyId, Integer crisisId, Integer start,
Integer limit, String sortColumn, String sortDirection)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
logger.info("Received request for fetching training data for crisisID = "
+ crisisId + "and modelFamilyId = " + modelFamilyId);
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/getTrainingData?crisisID=" + new Long(crisisId)
+ "&modelFamilyID=" + new Long(modelFamilyId)
+ "&fromRecord=" + start + "&limit=" + limit
+ "&sortColumn=" + sortColumn + "&sortDirection="
+ sortDirection);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
TrainingDataRequest trainingDataRequest = objectMapper.readValue(
jsonResponse, TrainingDataRequest.class);
if (trainingDataRequest != null
&& trainingDataRequest.getTrainingData() != null) {
logger.info("Tagger returned "
+ trainingDataRequest.getTrainingData().size()
+ " training data records for crisis with ID: "
+ crisisId + " and family model with ID: "
+ modelFamilyId);
return trainingDataRequest.getTrainingData();
} else {
return null;
}
} catch (Exception e) {
logger.error(crisisId + " Error while Getting training data for Crisis and Model", e);
throw new AidrException(
"Error while Getting training data for Crisis and Model.",
e);
}
}
@Override
public String getAssignableTask(Integer id, String userName)
throws AidrException {
Integer taggerUserId = isUserExistsByUsername(userName);
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// taskBufferNumber currently always 1
int taskBufferNumber = 1;
// WebResource webResource = client.resource(crowdsourcingAPIMainUrl
// + "/taskbuffer/getassignabletask/" + userName + "/" + id + "/" +
// taskBufferNumber);
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/document/getassignabletask/" + userName + "/" + id
+ "/" + taskBufferNumber);
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("getAssignableTask - clientResponse : "
+ clientResponse);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting Assignable Task in Tagger for user: "+userName, e);
throw new AidrException(
"Error while getting Assignable Task in Tagger", e);
}
}
@Override
public String getTemplateStatus(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/template/status/crisis/code/" + code);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("getTemplateStatus - clientResponse : "
+ clientResponse);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting Template Status in Tagger for code:"+code, e);
throw new AidrException(
"Error while getting Template Status in Tagger", e);
}
}
@Override
public String skipTask(Integer id, String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// WebResource webResource = client.resource(crowdsourcingAPIMainUrl
// + "/taskassignment/revert/searchByDocUserName/" + userName + "/"
// + id);
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/taskassignment/revert/searchByDocUserName/" + userName
+ "/" + id);
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("Skipping task: " + id + " for user = " + userName);
logger.info("skipTask - clientResponse : " + clientResponse);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while Skip Task operation for user: "+userName, e);
throw new AidrException("Error while Skip Task operation", e);
}
}
@Override
public boolean saveTaskAnswer(List<TaskAnswer> taskAnswer)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/taskanswer/save");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
logger.info("saveTaskAnswer - postData : "
+ objectMapper.writeValueAsString(taskAnswer));
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(taskAnswer)),
Response.class);
//logger.info("saveTaskAnswer - response status : "
// + clientResponse.getStatus());
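// Both 200 (OK) and 204 (No Content) responses are treated as a successful save.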
return clientResponse.getStatus() == 200
|| clientResponse.getStatus() == 204;
} catch (Exception e) {
logger.error("Error while saving TaskAnswer in AIDRCrowdsourcing", e);
return true;
}
}
@Override
public String loadLatestTweets(String code, String constraints)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count=1000");
logger.info("Invoking: " + outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count=1000 constraints:" + constraints);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(constraints),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
if (jsonResponse != null
&& (jsonResponse.startsWith("{") || jsonResponse
.startsWith("["))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
logger.error("Error while generating loading latest tweets for the collection: "+code, e);
throw new AidrException(
"Error while generating Tweet Ids link in taggerPersister",
e);
}
}
@Override
public ModelHistoryWrapper getModelHistoryByModelFamilyID(Integer start,
Integer limit, Integer id, String sortColumn, String sortDirection) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// WebResource webResource = client.resource(taggerMainUrl +
// "/model/modelFamily/" + id
// + "?start=" + start
// + "&limit=" + limit);
WebTarget webResource = client.target(taggerMainUrl
+ "/model/modelFamily/" + id + "?start=" + start
+ "&limit=" + limit+ "&sortColumn=" + sortColumn + "&sortDirection="
+ sortDirection);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
ModelHistoryWrapper modelHistoryWrapper = objectMapper.readValue(
jsonResponse, ModelHistoryWrapper.class);
return modelHistoryWrapper;
} catch (Exception e) {
logger.error("Error while Getting history records for Mode", e);
throw new AidrException(
"Error while Getting history records for Model.", e);
}
}
@Override
public List<TaggerModelNominalLabel> getAllLabelsForModel(Integer modelID,
String crisisCode) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
//logger.info("received request for modelID = " + modelID);
WebTarget webResource = client.target(taggerMainUrl
+ "/modelNominalLabel/" + modelID + "/" + crisisCode);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerModelLabelsResponse modelLabelsResponse = objectMapper
.readValue(jsonResponse, TaggerModelLabelsResponse.class);
if (modelLabelsResponse.getModelNominalLabelsDTO() != null) {
logger.info("Tagger returned "
+ modelLabelsResponse.getModelNominalLabelsDTO().size()
+ " labels for model with ID " + modelID);
for (TaggerModelNominalLabel dto : modelLabelsResponse
.getModelNominalLabelsDTO()) {
logger.info("Training count for crisis = " + crisisCode
+ ", label: " + dto.getNominalLabel().getName()
+ " is = " + dto.getTrainingDocuments());
}
}
return modelLabelsResponse.getModelNominalLabelsDTO();
} catch (Exception e) {
logger.error("Error while getting all labels for model from Tagger for the crisis: "+crisisCode, e);
throw new AidrException(
"Error while getting all labels for model from Tagger", e);
}
}
@Override
public String getRetainingThreshold() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/train/samplecountthreshold");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Exception while getting training sampleCountThreshold", e);
throw new AidrException("getRetainingThreshold : ", e);
}
}
@Override
public Map<String, Integer> getTaggersForCollections(
List<String> collectionCodes) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
// WebResource webResource = client.resource(taggerMainUrl +
// "/modelfamily/taggers-by-codes");
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/taggers-by-codes");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource
.request(MediaType.APPLICATION_JSON)
.post(Entity.json(objectMapper
.writeValueAsString(new TaggersForCollectionsRequest(
collectionCodes))), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
TaggersForCollectionsResponse taggersResponse = objectMapper
.readValue(jsonResponse,
TaggersForCollectionsResponse.class);
if (taggersResponse != null
&& !taggersResponse.getTaggersForCodes().isEmpty()) {
Map<String, Integer> result = new HashMap<String, Integer>();
for (TaggersForCodes taggerForCode : taggersResponse
.getTaggersForCodes()) {
result.put(taggerForCode.getCode(),
taggerForCode.getCount());
}
return result;
} else {
return Collections.emptyMap();
}
} catch (Exception e) {
logger.error("Error while getting taggers for collections", e);
throw new AidrException("Error while getting taggers for collections", e);
}
}
@Override
public boolean pingTagger() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/misc/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging tagger", e);
throw new AidrException(
"Error while pinging tagger.",e);
}
}
@Override
public boolean pingTrainer() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/util/ping/heartbeat");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null && "200".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging trainer", e);
throw new AidrException(
"Error while pinging trainer.",e);
}
}
@Override
public boolean pingAIDROutput() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(outputAPIMainUrl
+ "/manage/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging output", e);
throw new AidrException(
"Error while pinging output",
e);
}
}
@Override
public boolean pingPersister() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(persisterMainUrl
+ "/persister/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging persister", e);
throw new AidrException(
"Error while pinging persister",e);
}
}
// Added by koushik
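/**
 * Asks the Persister (/taggerPersister/genCSV) to generate a CSV export for the
 * given collection code, with the export limit hard-coded to 100000 rows, and
 * returns the Persister's JSON response as a map (which appears to carry the
 * generated download link).
 */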
@Override
public Map<String, Object> generateCSVLink(String code)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genCSV?collectionCode=" + code
+ "&exportLimit=100000");
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating CSV link in Persister for collection: "+code, e);
throw new AidrException(
"[generateCSVLink] Error while generating CSV link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateTweetIdsLink(String code)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
logger.info("[generateTweetIdsLink] Received request for code: "
+ code);
try {
/*System.out.println("Invoked URL: " + persisterMainUrl
+ "/taggerPersister/genTweetIds?collectionCode=" + code
+ "&downloadLimited=true");*/
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genTweetIds?collectionCode=" + code
+ "&downloadLimited=true");
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating Tweet Ids link in Persister for collection: "+code, e);
throw new AidrException(
"[generateTweetIdsLink] Error while generating Tweet Ids link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJSONLink(String code, String jsonType)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genJson?collectionCode=" + code
+ "&exportLimit=100000" + "&jsonType=" + jsonType);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.readEntity(String.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating JSON download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJSONLink] Error while generating JSON download link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateJsonTweetIdsLink(String code,
String jsonType) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genJsonTweetIds?collectionCode=" + code
+ "&downloadLimited=true&" + "&jsonType=" + jsonType);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating JSON Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJsonTweetIdsLink] Error while generating JSON Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateCSVFilteredLink(String code,
String queryString, String userName, Integer count, boolean removeRetweet) throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
String url = null;
Response clientResponse = null;
url = persisterMainUrl + "/taggerPersister/filter/genCSV?collectionCode=" + code + "&exportLimit="
+ count + "&userName=" + userName + "&removeRetweet=" + removeRetweet;
WebTarget webResource = client.target(url);
clientResponse = webResource.request(MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.info("Error while generating csv filtered download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateCSVFilteredLink] Error while generating csv filtered download link in Persister for collection",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateTweetIdsFilteredLink(String code,
String queryString, String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: "+ code);
try {
logger.info("[generateTweetIdsLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genTweetIds?collectionCode="
+ code + "&downloadLimited=true&userName=" + userName);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genTweetIds?collectionCode="
+ code + "&downloadLimited=true&userName=" + userName);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
//logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating filtererd Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateTweetIdsFilteredLink] Error while generating filtererd Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateTweetIdsOnlyFilteredLink(String code, String queryString, String userName, Integer count, Boolean removeRetweet) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
logger.info("[generateTweetIdsOnlyFilteredLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + count + "&removeRetweet=" + removeRetweet);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + count + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating filtererd Tweet Ids Only download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateTweetIdsOnlyFilteredLink] Error while generating filtererd Tweet Ids Only download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJSONFilteredLink(String code,
String queryString, String jsonType, String userName, Integer count, boolean removeRetweet)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genJson?collectionCode=" + code
+ "&exportLimit=" + count + "&jsonType=" + jsonType
+ "&userName=" + userName + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating filtered JSON download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJSONFilteredLink] Error while generating filtered JSON download link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateJsonTweetIdsFilteredLink(String code,
String queryString, String jsonType, String userName)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: " + code);
try {
logger.info("[generateJsonTweetIdsLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIds?collectionCode="
+ code + "&downloadLimited=true&" + "&jsonType=" + jsonType
+ "&userName=" + userName);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIds?collectionCode="
+ code + "&downloadLimited=true&" + "&jsonType=" + jsonType
+ "&userName=" + userName);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
//logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating filtered JSON Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJsonTweetIdsFilteredLink] Error while generating filtered JSON Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJsonTweetIdsOnlyFilteredLink(String code,
String queryString, String jsonType, String userName, Integer exportLimit, Boolean removeRetweet)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: " + code);
try {
logger.info("[generateJsonTweetIdsLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + exportLimit + "&jsonType="
+ jsonType + "&removeRetweet=" + removeRetweet);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + exportLimit + "&jsonType="
+ jsonType + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating filtered JSON Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJsonTweetIdsFilteredLink] Error while generating filtered JSON Tweet Ids download link in Persister",
e);
}
}
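/**
 * Collapses the flat list of TaggerCrisesAttribute rows into one TaggerAttribute
 * per nominal attribute ID, keeping only attributes owned by the requesting user
 * or by the system user (userID 1), and accumulating every label of an attribute
 * into its nominal label collection.
 */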
private java.util.Collection<TaggerAttribute> convertTaggerCrisesAttributeToDTO(
List<TaggerCrisesAttribute> attributes, Integer userId) {
Map<Integer, TaggerAttribute> result = new HashMap<Integer, TaggerAttribute>();
for (TaggerCrisesAttribute a : attributes) {
if (!result.containsKey(a.getNominalAttributeID())) {
if (!userId.equals(a.getUserID())
&& !(new Integer(1)).equals(a.getUserID())) {
continue;
}
TaggerUser user = new TaggerUser(a.getUserID());
List<TaggerLabel> labels = new ArrayList<TaggerLabel>();
TaggerLabel label = new TaggerLabel(a.getLabelName(),
a.getLabelID());
labels.add(label);
TaggerAttribute taggerAttribute = new TaggerAttribute(
a.getCode(), a.getDescription(), a.getName(),
a.getNominalAttributeID(), user, labels);
result.put(a.getNominalAttributeID(), taggerAttribute);
} else {
TaggerAttribute taggerAttribute = result.get(a
.getNominalAttributeID());
List<TaggerLabel> labels = taggerAttribute
.getNominalLabelCollection();
TaggerLabel label = new TaggerLabel(a.getLabelName(),
a.getLabelID());
labels.add(label);
}
}
logger.info("Created attributes collection of size = " + result.size());
//logger.info(result);
return result.values();
}
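/**
 * Parses the "sampleCountThreshold" field out of the JSON returned by
 * getRetainingThreshold(); falls back to a default of 50 if the call or the
 * parsing fails.
 */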
private int getCurrentRetrainingThreshold() throws Exception {
try {
String retrainingThreshold = this.getRetainingThreshold();
ObjectMapper mapper = JacksonWrapper.getObjectMapper();
JsonFactory factory = mapper.getJsonFactory(); // since 2.1 use
// mapper.getFactory()
// instead
JsonParser jp = factory.createJsonParser(retrainingThreshold);
JsonNode actualObj = mapper.readTree(jp);
JsonNode nameNode = actualObj.get("sampleCountThreshold");
int sampleCountThreshold = Integer.parseInt(nameNode.asText());
return sampleCountThreshold;
} catch (Exception e) {
logger.error("Exception while getting CurrentRetrainingThreshold", e);
return 50;
}
}
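/**
 * Looks up the model family by ID to obtain its crisis code and attribute code,
 * then calls the crowdsourcing API (/clientapp/delete/{crisisCode}/{attributeCode})
 * to remove the corresponding Pybossa client app. Failures are only logged.
 */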
private void deletePybossaApp(Integer modelFamilyID) {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
//System.out.print("removeAttributeFromCrises: starting ......................................");
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/" + modelFamilyID);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response resp = webResource.request(MediaType.APPLICATION_JSON)
.get();
String jsonResp = resp.readEntity(String.class);
TaggerModelFamily tm = objectMapper.readValue(jsonResp,
TaggerModelFamily.class);
if (tm != null && tm.getCrisis() != null
&& tm.getNominalAttribute() != null) {
String crisisCode = tm.getCrisis().getCode();
String attributeCode = tm.getNominalAttribute().getCode();
logger.info("crisisCode: " + crisisCode + " attributeCode: " + attributeCode);
WebTarget webResp = client.target(crowdsourcingAPIMainUrl
+ "/clientapp/delete/" + crisisCode + "/"
+ attributeCode);
Response clientResp = webResp.request(
MediaType.APPLICATION_JSON).get();
//logger.info("deactivated - clientResponse : " + clientResp);
} else {
logger.info("No modelfamily found for id = " + modelFamilyID);
}
} catch (Exception e) {
logger.error("deactivated - deletePybossaApp : " + e);
}
}
@Override
public String getAttributesAndLabelsByCrisisId(Integer id)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// http://example.com:8084/AIDRTrainerAPI/rest/crisis/id/1234
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/crisis/id/" + id);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("getAssignableTask - clientResponse : "
+ clientResponse);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting all nominal attributes and their labels for a given crisisID: "+id, e);
throw new AidrException(
"Error while getting all nominal attributes and their labels for a given crisisID",
e);
}
}
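/**
 * Two-step trash operation: first asks the Tagger API to trash the crisis
 * (cleaning up aidr-predict documents); if that succeeds and returns a crisis ID,
 * it then asks the crowdsourcing API to delete the crisis' micromapper tasks.
 * Returns 1 on success (or when no classifier was ever attached), 0 otherwise.
 */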
@Override
public int trashCollection(Collection collection) throws Exception {
int retVal = 0;
Long crisisID = -1L;
//logger.info("[trashCollection] request received for collection: "
//+ collection.getCode());
// First clean up the aidr-predict database of documents
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client
.target(taggerMainUrl + "/manage/collection/trash/crisis/" + collection.getCode());
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("[trashCollection] response from tagger-api: "
//+ jsonResponse);
if (jsonResponse != null && jsonResponse.contains("TRASHED")) {
retVal = 1;
crisisID = Long.parseLong(jsonResponse.substring(
jsonResponse.indexOf(":") + 1,
jsonResponse.indexOf("}")));
} else {
retVal = 0;
}
} catch (Exception e) {
logger.error("Error while attempting trash a collection: ", e);
throw new AidrException(
"Error while deleting a collection from aidr-predict database",
e);
}
//logger.info("[trashCollection] result of cleaning aidr-predict: "
//+ crisisID);
if (retVal > 0 && crisisID < 0) {
return 1; // crisis does not exist in aidr_predict table. Reason: no
// classifier attached
}
if (retVal > 0 && crisisID > 0) {
// Final DB task - cleanup the aidr-scheduler database of
// micromapper tasks
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/clientapp/delete/crisis/" + crisisID);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("[trashCollection] response from trainer-api: "
//+ jsonResponse);
if (jsonResponse != null
&& jsonResponse.equalsIgnoreCase("{\"status\":200}")) {
logger.info("[trashCollection] Success in trashing ");
return 1;
} else {
return 0;
}
} catch (Exception e) {
logger.error("Error while attempting trash REST call for aidr_scheduler for collection: ", e);
throw new AidrException(
"Error while attempting trash REST call for aidr_scheduler",
e);
}
}
return 0;
}
@Override
public int untrashCollection(String collectionCode) throws Exception {
//System.out.println("[untrashCollection] request received for collection: "+ collectionCode);
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(taggerMainUrl
+ "/manage/collection/untrash/crisis/" + collectionCode);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//System.out.println("[untrashCollection] response from tagger-api: "
// + jsonResponse);
if (jsonResponse != null
&& jsonResponse
.equalsIgnoreCase("{\"status\": \"UNTRASHED\"}")) {
logger.info("[trashCollection] Success in untrashing + "
+ collectionCode);
return 1;
} else {
return 0;
}
} catch (Exception e) {
logger.error("Error while attempting /untrash REST call for collection: "+collectionCode, e);
throw new AidrException(
"Error while attempting /untrash REST call", e);
}
}
@Override
public String loadLatestTweetsWithCount(String code, int count)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
String constraints = "{\"constraints\":[]}";
WebTarget webResource = client.target(outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count="
+ count);
/*System.out.println("[loadLatestTweetsWithCount] Invoking: "
+ outputAPIMainUrl + "/crisis/fetch/channel/filter/" + code
+ "?count=" + count);
System.out.println("[loadLatestTweetsWithCount] constraints: "
+ constraints);*/
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(constraints),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
if (jsonResponse != null
&& (jsonResponse.startsWith("{") || jsonResponse
.startsWith("["))) {
/*System.out
.println("[loadLatestTweetsWithCount] jsonResponse for collection "
+ code + ": " + jsonResponse);*/
return jsonResponse;
} else {
/*System.out
.println("[loadLatestTweetsWithCount] jsonResponse for collection "
+ code + ": \"\"");*/
return "";
}
} catch (Exception e) {
logger.error("Error while loadLatestTweetsWithCount for collection: "+code, e);
throw new AidrException("Error while loadLatestTweetsWithCount", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisID(
Long crisisID, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "?count="
+ count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisCode(
String crisisCode, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisCode/" + crisisCode + "?count="
+ count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisis code = "
+ crisisCode + " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisis code = "
+ crisisCode + " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisIDUserID(
Long crisisID, Long userID, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "/userID/"
+ userID + "?count=" + count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + ", userId = " + userID
+ " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + ", userId = " + userID
+ " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisIDUserName(
Long crisisID, String userName, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "/userName/"
+ userName + "?count=" + count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + ", user name = " + userName
+ " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + ", user name = " + userName
+ " from Tagger", e);
}
}
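/**
 * Posts the filter query to the Tagger download endpoint for human labeled
 * documents of a crisis/user pair and returns a map with the generated file
 * name ("fileName") and the number of documents ("total").
 */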
@Override
public Map<String, Object> downloadHumanLabeledDocumentsByCrisisUserName(
String queryString, String crisisCode, String userName,
Integer count, String fileType, String contentType)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
String targetURL = taggerMainUrl
+ "/misc/humanLabeled/download/crisis/" + crisisCode
+ "/userName/" + userName + "?count=" + count
+ "&fileType=" + fileType + "&contentType=" + contentType;
logger.info("Going to invoke REST API: " + targetURL + " POST body: " + queryString);
try {
// Rest call to Tagger
WebTarget webResource = client.target(targetURL);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("Response = " + jsonResponse);
TaggerResponseWrapper response = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ response.getTotal());
Map<String, Object> retVal = new HashMap<String, Object>();
retVal.put("fileName", response.getMessage());
retVal.put("total", response.getTotal());
return retVal;
} catch (Exception e) {
logger.error("Error while getting download link for human labeled documents for crisis code = "
+ crisisCode+ ", user name = "+ userName+ " from Tagger", e);
throw new AidrException("Error while getting download link for human labeled documents for crisis code = "
+ crisisCode+ ", user name = "+ userName+ " from Tagger", e);
}
}
@Override
public Map<String, Object> updateMicromapperEnabled(String code, Boolean isMicromapperEnabled) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/update/micromapperEnabled/" + code +"/"+isMicromapperEnabled);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Exception while updating isMicromapperEnabled ", e);
throw new AidrException("Exception while updating isMicromapperEnabled ",
e);
}
}
@Override
public Boolean sendMailService(String subject, String body){
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
Response clientResponse = null;
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/sendEmail");
Form form = new Form();
form.param("subject", subject);
form.param("body", body);
clientResponse = webResource.request().post(
Entity.entity(form,MediaType.APPLICATION_FORM_URLENCODED),Response.class);
if (clientResponse.getStatus() != 200) {
logger.warn("Couldn't contact AIDRTaggerAPI for sending error message");
return false;
}
} catch (Exception e) {
logger.error("Error in contacting AIDRTaggerAPI: " + clientResponse);
return false;
}
return true;
}
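/**
 * Fetches the number of labels for a collection from the Tagger API
 * (/label/collection/{id}); returns 0 when the call fails.
 */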
@Override
public Long getLabelCount(Long collectionId) {
Long labelCount = 0L;
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
Response clientResponse = null;
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/label/collection/" + collectionId);
clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
if (clientResponse.getStatus() != 200) {
logger.warn("Couldn't contact AIDRTaggerAPI while fetching label count for collection: " + collectionId);
} else {
labelCount = clientResponse.readEntity(Long.class);
}
} catch (Exception e) {
logger.error("Error in contacting AIDRTaggerAPI: " + clientResponse, e);
}
return labelCount;
}
}
| aidr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java | package qa.qcri.aidr.manager.service.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.glassfish.jersey.jackson.JacksonFeature;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import qa.qcri.aidr.common.code.JacksonWrapper;
import qa.qcri.aidr.dbmanager.dto.CollectionDTO;
import qa.qcri.aidr.dbmanager.dto.NominalAttributeDTO;
import qa.qcri.aidr.dbmanager.dto.NominalLabelDTO;
import qa.qcri.aidr.dbmanager.dto.UsersDTO;
import qa.qcri.aidr.dbmanager.dto.taggerapi.TrainingDataDTO;
import qa.qcri.aidr.manager.dto.ModelHistoryWrapper;
import qa.qcri.aidr.manager.dto.PingResponse;
import qa.qcri.aidr.manager.dto.TaggerAllCrisesResponse;
import qa.qcri.aidr.manager.dto.TaggerAllCrisesTypesResponse;
import qa.qcri.aidr.manager.dto.TaggerAttribute;
import qa.qcri.aidr.manager.dto.TaggerCrisesAttribute;
import qa.qcri.aidr.manager.dto.TaggerCrisis;
import qa.qcri.aidr.manager.dto.TaggerCrisisAttributesResponse;
import qa.qcri.aidr.manager.dto.TaggerCrisisExist;
import qa.qcri.aidr.manager.dto.TaggerCrisisModelsResponse;
import qa.qcri.aidr.manager.dto.TaggerCrisisRequest;
import qa.qcri.aidr.manager.dto.TaggerCrisisType;
import qa.qcri.aidr.manager.dto.TaggerLabel;
import qa.qcri.aidr.manager.dto.TaggerLabelRequest;
import qa.qcri.aidr.manager.dto.TaggerModel;
import qa.qcri.aidr.manager.dto.TaggerModelFamily;
import qa.qcri.aidr.manager.dto.TaggerModelLabelsResponse;
import qa.qcri.aidr.manager.dto.TaggerModelNominalLabel;
import qa.qcri.aidr.manager.dto.TaggerResponseWrapper;
import qa.qcri.aidr.manager.dto.TaggerStatusResponse;
import qa.qcri.aidr.manager.dto.TaggerUser;
import qa.qcri.aidr.manager.dto.TaggersForCodes;
import qa.qcri.aidr.manager.dto.TaggersForCollectionsRequest;
import qa.qcri.aidr.manager.dto.TaggersForCollectionsResponse;
import qa.qcri.aidr.manager.dto.TaskAnswer;
import qa.qcri.aidr.manager.dto.TrainingDataRequest;
import qa.qcri.aidr.manager.exception.AidrException;
import qa.qcri.aidr.manager.persistence.entities.Collection;
import qa.qcri.aidr.manager.service.TaggerService;
import qa.qcri.aidr.manager.service.UserService;
import qa.qcri.aidr.manager.util.ManagerConfigurationProperty;
import qa.qcri.aidr.manager.util.ManagerConfigurator;
//import com.sun.jersey.api.client.Client;
//import com.sun.jersey.api.client.ClientResponse;
//import com.sun.jersey.api.client.WebResource;
@Service("taggerService")
public class TaggerServiceImpl implements TaggerService {
private Logger logger = Logger.getLogger(TaggerServiceImpl.class);
// @Autowired
// private Client client;
private static String taggerMainUrl;
private static String crowdsourcingAPIMainUrl;
private static String persisterMainUrl;
private static String outputAPIMainUrl;
@Autowired
private UserService userService;
TaggerServiceImpl() {
taggerMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.TAGGER_MAIN_URL);
crowdsourcingAPIMainUrl = ManagerConfigurator.getInstance()
.getProperty(ManagerConfigurationProperty.CROWDSOURCING_API_MAIN_URL);
persisterMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.PERSISTER_MAIN_URL);
outputAPIMainUrl = ManagerConfigurator.getInstance().getProperty(
ManagerConfigurationProperty.OUTPUT_MAIN_URL);
}
// new DTOs introduced. -Imran
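/**
 * Fetches all crisis types from the Tagger API (/crisisType/all) and returns
 * them as a list; throws AidrException when the call or deserialization fails.
 */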
@Override
public List<TaggerCrisisType> getAllCrisisTypes() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received request to fetch all crisisTypes");
WebTarget webResource = client.target(taggerMainUrl
+ "/crisisType/all");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.error("URL: " + taggerMainUrl + " " + jsonResponse);
TaggerAllCrisesTypesResponse crisesTypesResponse = objectMapper
.readValue(jsonResponse, TaggerAllCrisesTypesResponse.class);
if (crisesTypesResponse.getCrisisTypes() != null) {
logger.info("Tagger returned "
+ crisesTypesResponse.getCrisisTypes().size()
+ " crises types");
}
return crisesTypesResponse.getCrisisTypes();
} catch (Exception e) {
logger.error("Error while getting all crisis from Tagger", e);
throw new AidrException(
"Error while getting all crisis from Tagger", e);
}
}
@Override
public List<TaggerCrisis> getCrisesByUserId(Integer userId)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis?userID=" + userId);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerAllCrisesResponse taggerAllCrisesResponse = objectMapper
.readValue(jsonResponse, TaggerAllCrisesResponse.class);
if (taggerAllCrisesResponse.getCrisises() != null) {
logger.info("Tagger returned "
+ taggerAllCrisesResponse.getCrisises().size()
+ " crisis for user");
}
return taggerAllCrisesResponse.getCrisises();
} catch (Exception e) {
logger.error("Exception while fetching crisis by userId: "+userId, e);
throw new AidrException(
"No collection is enabled for Tagger. Please enable tagger for one of your collections.",
e);
}
}
@Override
public String createNewCrises(TaggerCrisisRequest crisis)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
CollectionDTO dto = crisis.toDTO();
logger.info("Going to create new crisis: " + dto.getCode());
WebTarget webResource = client.target(taggerMainUrl + "/crisis");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(dto),
Response.class);
return clientResponse.readEntity(String.class);
} catch (Exception e) {
logger.error("Error while creating new crisis: "+crisis.getName(), e);
throw new AidrException(
"Error while creating new crises in Tagger", e);
}
}
// (6)
@Override
public java.util.Collection<TaggerAttribute> getAttributesForCrises(Integer crisisID,
Integer userId) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/attribute/crisis/all?exceptCrisis=" + crisisID);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisAttributesResponse crisisAttributesResponse = objectMapper
.readValue(jsonResponse,
TaggerCrisisAttributesResponse.class);
if (crisisAttributesResponse.getCrisisAttributes() != null) {
logger.info("Tagger returned "
+ crisisAttributesResponse.getCrisisAttributes().size()
+ " attributes available for crises with ID "
+ crisisID);
} else {
return Collections.emptyList();
}
return convertTaggerCrisesAttributeToDTO(
crisisAttributesResponse.getCrisisAttributes(), userId);
} catch (Exception e) {
logger.error("Error while getting all attributes for crisis from Tagger for crisisID: "+crisisID, e);
throw new AidrException(
"Error while getting all attributes for crisis from Tagger",
e);
}
}
@Override
public Map<String, Integer> countCollectionsClassifiers(
List<String> collectionCodes) throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/crises");
String input = "";
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
input = objectMapper.writeValueAsString(collectionCodes);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(input));
String jsonResponse = clientResponse.readEntity(String.class);
HashMap<String, Integer> rv = objectMapper.readValue(jsonResponse,
HashMap.class);
return rv;
} catch (Exception e) {
logger.error("Error while getting amount of classifiers by collection codes in Tagger", e);
throw new AidrException(
"Error while getting amount of classifiers by collection codes in Tagger",
e);
}
}
@Override
public TaggerCrisisExist isCrisesExist(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisExist crisisExist = objectMapper.readValue(
jsonResponse, TaggerCrisisExist.class);
if (crisisExist.getCrisisId() != null) {
logger.info("Response from Tagger-API for Crises with the code "
+ code+ ", found crisisID = "+ crisisExist.getCrisisId());
return crisisExist;
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while checking if crisis exist in Tagger for collection: "+code, e);
throw new AidrException(
"Error while checking if crisis exist in Tagger", e);
}
}
@Override
public Integer isUserExistsByUsername(String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/user/"
+ userName);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
UsersDTO user = objectMapper
.readValue(jsonResponse, UsersDTO.class);
TaggerUser taggerUser = new TaggerUser(user);
if (taggerUser != null && taggerUser.getUserID() != null) {
logger.info("User with the user name " + userName
+ " already exist in Tagger and has ID: "
+ taggerUser.getUserID());
return taggerUser.getUserID();
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while checking if user: "+userName +"exist", e);
throw new AidrException(
"Error while checking if user exist in Tagger", e);
}
}
@Override
public Integer addNewUser(TaggerUser taggerUser) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/user");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(taggerUser
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
UsersDTO dto = objectMapper.readValue(jsonResponse, UsersDTO.class);
if (dto != null) {
TaggerUser createdUser = new TaggerUser(dto);
if (createdUser != null && createdUser.getUserID() != null) {
logger.info("User with ID " + createdUser.getUserID()
+ " was created in Tagger");
return createdUser.getUserID();
} else {
return null;
}
} else {
return null;
}
} catch (Exception e) {
logger.error("Error while adding new user to Tagger", e);
throw new AidrException("Error while adding new user to Tagger", e);
}
}
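/**
 * Posts a new model family (crisis + attribute pairing) to the Tagger API and
 * returns the generated modelFamilyID, or null when the Tagger rejects the request.
 */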
@Override
public Integer addAttributeToCrisis(TaggerModelFamily modelFamily)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received add Attirbute request for crisis = "
+ modelFamily.getCrisis().getCrisisID() + ", attribute = "
+ modelFamily.getNominalAttribute().getNominalAttributeID());
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(modelFamily
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper responseWrapper = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
if (responseWrapper != null) {
Long modelFamilyIDLong = responseWrapper.getEntityID();
Integer modelFamilyID = new Long(modelFamilyIDLong).intValue();
if (modelFamilyID.intValue() > 0) {
logger.info("Attribute was added to crises: "
+ modelFamilyID);
return modelFamilyID;
} else {
logger.info("Attribute was NOT added to crises: ");
logger.info("Received message from tagger-api: "
+ responseWrapper.getStatusCode() + "\n"
+ responseWrapper.getMessage());
return null;
}
} else {
logger.info("Attribute was NOT added to crises: ");
return null;
}
} catch (Exception e) {
logger.error("Error while adding attribute to crises", e);
throw new AidrException("Error while adding attribute to crises", e);
}
}
@Override
public TaggerCrisis getCrisesByCode(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
logger.info("Received request for crisis : " + code);
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/by-code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
logger.info("received response: " + jsonResponse);
CollectionDTO dto = null;
TaggerResponseWrapper response = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
if (response.getDataObject() != null) {
dto = objectMapper.readValue(objectMapper
.writeValueAsString(response.getDataObject()),
CollectionDTO.class);
}
logger.info("deserialization result: " + dto);
if (dto != null) {
TaggerCrisis crisis = new TaggerCrisis(dto);
if (crisis != null) {
logger.info("Tagger returned crisis with code"
+ crisis.getCode());
}
return crisis;
}
return null;
} catch (Exception e) {
logger.error("Error while getting crisis by code for collection: "+code, e);
return null;
// throw new
// AidrException("Error while getting crisis by code from Tagger",
// e);
}
}
@Override
public TaggerCrisis updateCode(TaggerCrisis crisis) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
CollectionDTO dto = crisis.toDTO();
WebTarget webResource = client.target(taggerMainUrl + "/crisis");
logger.info("Received update request for crisis = "
+ crisis.getCode() + ", dto = " + dto.getCode());
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).put(
Entity.json(objectMapper.writeValueAsString(dto)),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
CollectionDTO updatedDTO = objectMapper.readValue(jsonResponse,
CollectionDTO.class);
TaggerCrisis updatedCrisis = new TaggerCrisis(updatedDTO);
logger.info("Received response: " + updatedCrisis.getCode() + ", "
+ updatedCrisis.getName() + ","
+ updatedCrisis.getCrisisType().getCrisisTypeID());
if (updatedCrisis != null) {
logger.info("Crisis with id " + updatedCrisis.getCrisisID()
+ " was updated in Tagger");
}
return crisis;
} catch (Exception e) {
logger.error("Error while updating crisis: "+crisis.getCode(), e);
throw new AidrException(
"Error while updating crisis", e);
}
}
@Override
public List<TaggerModel> getModelsForCrisis(Integer crisisID)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
int retrainingThreshold = getCurrentRetrainingThreshold();
WebTarget webResource = client.target(taggerMainUrl
+ "/model/crisis/" + new Long(crisisID));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerCrisisModelsResponse crisisModelsResponse = objectMapper
.readValue(jsonResponse, TaggerCrisisModelsResponse.class);
logger.info("Tagger returned jsonResponse: " + jsonResponse);
if (crisisModelsResponse.getModelWrapper() != null) {
logger.info("Tagger returned "
+ crisisModelsResponse.getModelWrapper().size()
+ " models for crises with ID " + crisisID);
List<TaggerModel> tempTaggerModel = new ArrayList<TaggerModel>();
for (TaggerModel temp : crisisModelsResponse.getModelWrapper()) {
TaggerModel tm = new TaggerModel();
// System.out.println("reset0 : " + retrainingThreshold);
tm.setRetrainingThreshold(retrainingThreshold);
tm.setAttributeID(temp.getAttributeID());
tm.setModelID(temp.getModelID());
tm.setAttribute(temp.getAttribute());
tm.setAuc(temp.getAuc());
tm.setStatus(temp.getStatus());
tm.setTrainingExamples(temp.getTrainingExamples());
tm.setClassifiedDocuments(temp.getClassifiedDocuments());
tm.setModelFamilyID(temp.getModelFamilyID());
// System.out.println("reset : " +
// tm.getRetrainingThreshold());
tempTaggerModel.add(tm);
}
return tempTaggerModel;
}
return null;
} catch (Exception e) {
logger.error("Error while getting all models for crisisId:"+crisisID, e);
throw new AidrException(
"Error while getting all models for crisis from Tagger", e);
}
}
// (1)
@Override
public TaggerAttribute createNewAttribute(TaggerAttribute attribute)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(attribute
.toDTO())), Response.class);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute newAttribute = dto != null ? new TaggerAttribute(
dto) : null;
if (newAttribute != null) {
logger.info("Attribute with ID "
+ newAttribute.getNominalAttributeID()
+ " was created in Tagger");
}
return newAttribute;
} catch (Exception e) {
logger.error("Error while creating new attribute", e);
throw new AidrException(
"Error while creating new attribute in Tagger", e);
}
}
// (4)
@Override
public TaggerAttribute getAttributeInfo(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
logger.info("received request for nominal attribute id: " + id);
WebTarget webResource = client.target(taggerMainUrl + "/attribute/"
+ new Long(id));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute response = dto != null ? new TaggerAttribute(dto)
: null;
logger.info("Received response: " + response != null ? response
.getName() : "no attribute for id = " + id);
if (response != null) {
logger.info("Attribute with ID "
+ response.getNominalAttributeID()
+ " was retrieved from Tagger");
}
return response;
} catch (Exception e) {
logger.error("Error while getting attribute info for attribute: "+id, e);
throw new AidrException(
"Error while getting attribute from Tagger", e);
}
}
@Override
public TaggerLabel getLabelInfo(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl + "/label/"
+ id);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
NominalLabelDTO.class);
TaggerLabel response = new TaggerLabel(dto);
if (response != null) {
logger.info("Label with ID " + response.getNominalLabelID()
+ " was retrieved from Tagger");
return response;
}
return null;
} catch (Exception e) {
logger.error("Error while getting label info for labelId: "+id, e);
throw new AidrException("Error while getting label from Tagger", e);
}
}
// (3)
@Override
public boolean deleteAttribute(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute/"
+ id);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Attribute with ID " + id
+ " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while deleting a attribute for attributeId: "+id, e);
throw new AidrException("Error while deleting attribute in Tagger",
e);
}
}
@Override
public boolean deleteTrainingExample(Integer id) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl
+ "/document/removeTrainingExample/" + new Long(id));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Document with ID " + id
+ " was deleted in Tagger");
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while deleting document", e);
throw new AidrException("Error while deleting document in Tagger",
e);
}
}
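/**
 * Detaches a classifier from a crisis: first removes the associated Pybossa app,
 * then deletes the model family via the Tagger API. Returns true only when the
 * Tagger reports SUCCESS.
 */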
@Override
public boolean removeAttributeFromCrises(Integer modelFamilyID)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
deletePybossaApp(modelFamilyID);
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/" + new Long(modelFamilyID));
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).delete();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerStatusResponse response = objectMapper.readValue(
jsonResponse, TaggerStatusResponse.class);
if (response != null && response.getStatusCode() != null) {
if ("SUCCESS".equals(response.getStatusCode())) {
logger.info("Classifier was remove from crises by modelFamilyID: "
+ modelFamilyID);
return true;
} else {
return false;
}
}
return false;
} catch (Exception e) {
logger.error("Error while removing classifier from crisis ", e);
throw new AidrException(
"Error while removing classifier from crisis in Tagger", e);
}
}
// (2)
@Override
public TaggerAttribute updateAttribute(TaggerAttribute attribute)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/attribute");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).put(
Entity.json(objectMapper.writeValueAsString(attribute
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
NominalAttributeDTO dto = objectMapper.readValue(jsonResponse,
NominalAttributeDTO.class);
TaggerAttribute updatedAttribute = new TaggerAttribute(dto);
if (updatedAttribute != null) {
logger.info("Attribute with id "
+ updatedAttribute.getNominalAttributeID()
+ " was updated in Tagger");
} else {
return null;
}
return attribute;
} catch (Exception e) {
logger.error("Error while updating attribute: "+attribute.getCode(), e);
throw new AidrException("Error while updating attribute in Tagger",
e);
}
}
@Override
public TaggerLabel updateLabel(TaggerLabelRequest label)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
// WebResource webResource = client.resource(taggerMainUrl +
// "/label");
WebTarget webResource = client.target(taggerMainUrl + "/label");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON)
.put(Entity.json(objectMapper.writeValueAsString(label
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
			NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
					NominalLabelDTO.class);
			if (dto == null) {
				// Tagger did not return an updated label
				return null;
			}
			TaggerLabel updatedLabel = new TaggerLabel(dto);
			logger.info("Label with id " + updatedLabel.getNominalLabelID()
					+ " was updated in Tagger");
			return updatedLabel;
} catch (Exception e) {
logger.error("Error while updating label: "+label.getName(), e);
throw new AidrException("Error while updating label in Tagger", e);
}
}
@Override
public TaggerLabel createNewLabel(TaggerLabelRequest label)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
WebTarget webResource = client.target(taggerMainUrl + "/label");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON)
.post(Entity.json(objectMapper.writeValueAsString(label
.toDTO())), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
			NominalLabelDTO dto = objectMapper.readValue(jsonResponse,
					NominalLabelDTO.class);
			if (dto != null) {
				TaggerLabel response = new TaggerLabel(dto);
				logger.info("Label with ID " + response.getNominalLabelID()
						+ " was created in Tagger");
				return response;
			} else {
				logger.error("Error while creating new label: " + label.getName());
				throw new AidrException(
						"Error while creating new label in Tagger");
			}
} catch (Exception e) {
logger.error("Error while creating new label: "+label.getName(), e);
throw new AidrException("Error while creating new label in Tagger",
e);
}
}
// (7)
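	/**
	 * Looks up a nominal attribute by its code; returns the attribute when the
	 * code is already registered in Tagger, or null otherwise.
	 */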
@Override
public TaggerAttribute attributeExists(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/attribute/code/" + code);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerAttribute attribute = objectMapper.readValue(jsonResponse,
TaggerAttribute.class);
if (attribute != null) {
				logger.info("Attribute with the code " + code
						+ " already exists in Tagger.");
return attribute;
} else {
return null;
}
} catch (Exception e) {
			logger.error("Error while checking if attribute: " + code + " exists", e);
			throw new AidrException(
					"Error while checking if attribute exists in Tagger", e);
}
}
@Override
public List<TrainingDataDTO> getTrainingDataByModelIdAndCrisisId(
Integer modelFamilyId, Integer crisisId, Integer start,
Integer limit, String sortColumn, String sortDirection)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
		logger.info("Received request for fetching training data for crisisID = "
				+ crisisId + " and modelFamilyId = " + modelFamilyId);
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/getTrainingData?crisisID=" + new Long(crisisId)
+ "&modelFamilyID=" + new Long(modelFamilyId)
+ "&fromRecord=" + start + "&limit=" + limit
+ "&sortColumn=" + sortColumn + "&sortDirection="
+ sortDirection);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
TrainingDataRequest trainingDataRequest = objectMapper.readValue(
jsonResponse, TrainingDataRequest.class);
if (trainingDataRequest != null
&& trainingDataRequest.getTrainingData() != null) {
logger.info("Tagger returned "
+ trainingDataRequest.getTrainingData().size()
+ " training data records for crisis with ID: "
+ crisisId + " and family model with ID: "
+ modelFamilyId);
return trainingDataRequest.getTrainingData();
} else {
return null;
}
} catch (Exception e) {
			logger.error("Error while getting training data for crisis " + crisisId
					+ " and model family " + modelFamilyId, e);
			throw new AidrException(
					"Error while getting training data for Crisis and Model.",
e);
}
}
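	/**
	 * Asks the crowdsourcing API for the next task that can be assigned to the
	 * given user for the given crisis; the task buffer size is currently fixed
	 * at one.
	 */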
@Override
public String getAssignableTask(Integer id, String userName)
throws AidrException {
Integer taggerUserId = isUserExistsByUsername(userName);
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// taskBufferNumber currently always 1
int taskBufferNumber = 1;
// WebResource webResource = client.resource(crowdsourcingAPIMainUrl
// + "/taskbuffer/getassignabletask/" + userName + "/" + id + "/" +
// taskBufferNumber);
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/document/getassignabletask/" + userName + "/" + id
+ "/" + taskBufferNumber);
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("getAssignableTask - clientResponse : "
+ clientResponse);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting Assignable Task in Tagger for user: "+userName, e);
throw new AidrException(
"Error while getting Assignable Task in Tagger", e);
}
}
@Override
public String getTemplateStatus(String code) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/template/status/crisis/code/" + code);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("getTemplateStatus - clientResponse : "
+ clientResponse);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting Template Status in Tagger for code:"+code, e);
throw new AidrException(
"Error while getting Template Status in Tagger", e);
}
}
@Override
public String skipTask(Integer id, String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// WebResource webResource = client.resource(crowdsourcingAPIMainUrl
// + "/taskassignment/revert/searchByDocUserName/" + userName + "/"
// + id);
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/taskassignment/revert/searchByDocUserName/" + userName
+ "/" + id);
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
logger.info("Skipping task: " + id + " for user = " + userName);
logger.info("skipTask - clientResponse : " + clientResponse);
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while Skip Task operation for user: "+userName, e);
throw new AidrException("Error while Skip Task operation", e);
}
}
@Override
public boolean saveTaskAnswer(List<TaskAnswer> taskAnswer)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/taskanswer/save");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
logger.info("saveTaskAnswer - postData : "
+ objectMapper.writeValueAsString(taskAnswer));
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(
Entity.json(objectMapper.writeValueAsString(taskAnswer)),
Response.class);
//logger.info("saveTaskAnswer - response status : "
// + clientResponse.getStatus());
return clientResponse.getStatus() == 204;
} catch (Exception e) {
logger.error("Error while saving TaskAnswer in AIDRCrowdsourcing", e);
return true;
}
}
@Override
public String loadLatestTweets(String code, String constraints)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count=1000");
logger.info("Invoking: " + outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count=1000 constraints:" + constraints);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(constraints),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
if (jsonResponse != null
&& (jsonResponse.startsWith("{") || jsonResponse
.startsWith("["))) {
return jsonResponse;
} else {
return "";
}
} catch (Exception e) {
			logger.error("Error while loading latest tweets for the collection: " + code, e);
			throw new AidrException(
					"Error while loading latest tweets for the collection",
e);
}
}
@Override
public ModelHistoryWrapper getModelHistoryByModelFamilyID(Integer start,
Integer limit, Integer id, String sortColumn, String sortDirection) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// WebResource webResource = client.resource(taggerMainUrl +
// "/model/modelFamily/" + id
// + "?start=" + start
// + "&limit=" + limit);
WebTarget webResource = client.target(taggerMainUrl
+ "/model/modelFamily/" + id + "?start=" + start
+ "&limit=" + limit+ "&sortColumn=" + sortColumn + "&sortDirection="
+ sortDirection);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
// ClientResponse clientResponse =
// webResource.type(MediaType.APPLICATION_JSON)
// .accept(MediaType.APPLICATION_JSON)
// .get(ClientResponse.class);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
ModelHistoryWrapper modelHistoryWrapper = objectMapper.readValue(
jsonResponse, ModelHistoryWrapper.class);
return modelHistoryWrapper;
} catch (Exception e) {
			logger.error("Error while getting history records for Model", e);
throw new AidrException(
"Error while Getting history records for Model.", e);
}
}
@Override
public List<TaggerModelNominalLabel> getAllLabelsForModel(Integer modelID,
String crisisCode) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
//logger.info("received request for modelID = " + modelID);
WebTarget webResource = client.target(taggerMainUrl
+ "/modelNominalLabel/" + modelID + "/" + crisisCode);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerModelLabelsResponse modelLabelsResponse = objectMapper
.readValue(jsonResponse, TaggerModelLabelsResponse.class);
if (modelLabelsResponse.getModelNominalLabelsDTO() != null) {
logger.info("Tagger returned "
+ modelLabelsResponse.getModelNominalLabelsDTO().size()
+ " labels for model with ID " + modelID);
for (TaggerModelNominalLabel dto : modelLabelsResponse
.getModelNominalLabelsDTO()) {
logger.info("Training count for crisis = " + crisisCode
+ ", label: " + dto.getNominalLabel().getName()
+ " is = " + dto.getTrainingDocuments());
}
}
return modelLabelsResponse.getModelNominalLabelsDTO();
} catch (Exception e) {
logger.error("Error while getting all labels for model from Tagger for the crisis: "+crisisCode, e);
throw new AidrException(
"Error while getting all labels for model from Tagger", e);
}
}
@Override
public String getRetainingThreshold() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/train/samplecountthreshold");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.getEntity(String.class);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Exception while getting training sampleCountThreshold", e);
			throw new AidrException("Error while getting the retraining sample count threshold from Tagger", e);
}
}
@Override
public Map<String, Integer> getTaggersForCollections(
List<String> collectionCodes) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
/**
* Rest call to Tagger
*/
// WebResource webResource = client.resource(taggerMainUrl +
// "/modelfamily/taggers-by-codes");
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/taggers-by-codes");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource
.request(MediaType.APPLICATION_JSON)
.post(Entity.json(objectMapper
.writeValueAsString(new TaggersForCollectionsRequest(
collectionCodes))), Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
TaggersForCollectionsResponse taggersResponse = objectMapper
.readValue(jsonResponse,
TaggersForCollectionsResponse.class);
if (taggersResponse != null
&& !taggersResponse.getTaggersForCodes().isEmpty()) {
Map<String, Integer> result = new HashMap<String, Integer>();
for (TaggersForCodes taggerForCode : taggersResponse
.getTaggersForCodes()) {
result.put(taggerForCode.getCode(),
taggerForCode.getCount());
}
return result;
} else {
return Collections.emptyMap();
}
} catch (Exception e) {
logger.error("Error while getting taggers for collections", e);
throw new AidrException("Error while getting taggers for collections", e);
}
}
@Override
public boolean pingTagger() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl + "/misc/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging tagger", e);
throw new AidrException(
"Error while pinging tagger.",e);
}
}
@Override
public boolean pingTrainer() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/util/ping/heartbeat");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null && "200".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging trainer", e);
throw new AidrException(
"Error while pinging trainer.",e);
}
}
@Override
public boolean pingAIDROutput() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(outputAPIMainUrl
+ "/manage/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging output", e);
throw new AidrException(
"Error while pinging output",
e);
}
}
@Override
public boolean pingPersister() throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(persisterMainUrl
+ "/persister/ping");
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
PingResponse pingResponse = objectMapper.readValue(jsonResponse,
PingResponse.class);
if (pingResponse != null
&& "RUNNING".equals(pingResponse.getStatus())) {
return true;
} else {
return false;
}
} catch (Exception e) {
logger.error("Error while pinging persister", e);
throw new AidrException(
"Error while pinging persister",e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateCSVLink(String code)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genCSV?collectionCode=" + code
+ "&exportLimit=100000");
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating CSV link in Persister for collection: "+code, e);
throw new AidrException(
"[generateCSVLink] Error while generating CSV link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateTweetIdsLink(String code)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
logger.info("[generateTweetIdsLink] Received request for code: "
+ code);
try {
/*System.out.println("Invoked URL: " + persisterMainUrl
+ "/taggerPersister/genTweetIds?collectionCode=" + code
+ "&downloadLimited=true");*/
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genTweetIds?collectionCode=" + code
+ "&downloadLimited=true");
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating Tweet Ids link in Persister for collection: "+code, e);
throw new AidrException(
"[generateTweetIdsLink] Error while generating Tweet Ids link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJSONLink(String code, String jsonType)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/genJson?collectionCode=" + code
+ "&exportLimit=100000" + "&jsonType=" + jsonType);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
// String jsonResponse = clientResponse.readEntity(String.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating JSON download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJSONLink] Error while generating JSON download link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateJsonTweetIdsLink(String code,
String jsonType) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
			WebTarget webResource = client.target(persisterMainUrl
					+ "/taggerPersister/genJsonTweetIds?collectionCode=" + code
					+ "&downloadLimited=true" + "&jsonType=" + jsonType);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating JSON Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJsonTweetIdsLink] Error while generating JSON Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateCSVFilteredLink(String code,
String queryString, String userName, Integer count, boolean removeRetweet) throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
String url = null;
Response clientResponse = null;
url = persisterMainUrl + "/taggerPersister/filter/genCSV?collectionCode=" + code + "&exportLimit="
+ count + "&userName=" + userName + "&removeRetweet=" + removeRetweet;
WebTarget webResource = client.target(url);
clientResponse = webResource.request(MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
			logger.error("Error while generating CSV filtered download link in Persister for collection: " + code, e);
			throw new AidrException(
					"[generateCSVFilteredLink] Error while generating CSV filtered download link in Persister for collection",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateTweetIdsFilteredLink(String code,
String queryString, String userName) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: "+ code);
try {
			logger.info("[generateTweetIdsFilteredLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genTweetIds?collectionCode="
+ code + "&downloadLimited=true&userName=" + userName);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genTweetIds?collectionCode="
+ code + "&downloadLimited=true&userName=" + userName);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
//logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
} catch (Exception e) {
			logger.error("Error while generating filtered Tweet Ids download link in Persister for collection: " + code, e);
			throw new AidrException(
					"[generateTweetIdsFilteredLink] Error while generating filtered Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateTweetIdsOnlyFilteredLink(String code, String queryString, String userName, Integer count, Boolean removeRetweet) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
logger.info("[generateTweetIdsOnlyFilteredLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + count + "&removeRetweet=" + removeRetweet);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + count + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
			logger.error("Error while generating filtered Tweet Ids Only download link in Persister for collection: " + code, e);
			throw new AidrException(
					"[generateTweetIdsOnlyFilteredLink] Error while generating filtered Tweet Ids Only download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJSONFilteredLink(String code,
String queryString, String jsonType, String userName, Integer count, boolean removeRetweet)
throws AidrException {
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genJson?collectionCode=" + code
+ "&exportLimit=" + count + "&jsonType=" + jsonType
+ "&userName=" + userName + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating filtered JSON download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJSONFilteredLink] Error while generating filtered JSON download link in Persister",
e);
}
}
// Added by koushik
@Override
public Map<String, Object> generateJsonTweetIdsFilteredLink(String code,
String queryString, String jsonType, String userName)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: " + code);
try {
			logger.info("[generateJsonTweetIdsFilteredLink] Invoked URL: "
					+ persisterMainUrl
					+ "/taggerPersister/filter/genJsonTweetIds?collectionCode="
					+ code + "&downloadLimited=true" + "&jsonType=" + jsonType
					+ "&userName=" + userName);
			WebTarget webResource = client.target(persisterMainUrl
					+ "/taggerPersister/filter/genJsonTweetIds?collectionCode="
					+ code + "&downloadLimited=true" + "&jsonType=" + jsonType
					+ "&userName=" + userName);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
//logger.info("Returning from func: " + jsonResponse);
return jsonResponse;
/*
* if (jsonResponse != null &&
* "http".equals(jsonResponse.substring(0, 4))) { return
* jsonResponse; } else { return ""; }
*/
} catch (Exception e) {
logger.error("Error while generating filtered JSON Tweet Ids download link in Persister for collection: "+code, e);
throw new AidrException(
"[generateJsonTweetIdsFilteredLink] Error while generating filtered JSON Tweet Ids download link in Persister",
e);
}
}
@Override
public Map<String, Object> generateJsonTweetIdsOnlyFilteredLink(String code,
String queryString, String jsonType, String userName, Integer exportLimit, Boolean removeRetweet)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
//System.out.println("[generateJsonTweetIdsLink] Received request for code: " + code);
try {
			logger.info("[generateJsonTweetIdsOnlyFilteredLink] Invoked URL: "
+ persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + exportLimit + "&jsonType="
+ jsonType + "&removeRetweet=" + removeRetweet);
WebTarget webResource = client.target(persisterMainUrl
+ "/taggerPersister/filter/genJsonTweetIdsOnly?collectionCode="
+ code + "&userName=" + userName + "&exportLimit=" + exportLimit + "&jsonType="
+ jsonType + "&removeRetweet=" + removeRetweet);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
Map<String, Object> jsonResponse = clientResponse
.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while generating filtered JSON Tweet Ids download link in Persister for collection: "+code, e);
			throw new AidrException(
					"[generateJsonTweetIdsOnlyFilteredLink] Error while generating filtered JSON Tweet Ids Only download link in Persister",
e);
}
}
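	/**
	 * Groups the flat attribute/label rows returned by Tagger into one
	 * TaggerAttribute per nominal attribute, keeping only attributes owned by
	 * the requesting user or by the system user (ID 1) and merging the labels
	 * of duplicate attribute rows.
	 */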
private java.util.Collection<TaggerAttribute> convertTaggerCrisesAttributeToDTO(
List<TaggerCrisesAttribute> attributes, Integer userId) {
Map<Integer, TaggerAttribute> result = new HashMap<Integer, TaggerAttribute>();
for (TaggerCrisesAttribute a : attributes) {
if (!result.containsKey(a.getNominalAttributeID())) {
if (!userId.equals(a.getUserID())
&& !(new Integer(1)).equals(a.getUserID())) {
continue;
}
TaggerUser user = new TaggerUser(a.getUserID());
List<TaggerLabel> labels = new ArrayList<TaggerLabel>();
TaggerLabel label = new TaggerLabel(a.getLabelName(),
a.getLabelID());
labels.add(label);
TaggerAttribute taggerAttribute = new TaggerAttribute(
a.getCode(), a.getDescription(), a.getName(),
a.getNominalAttributeID(), user, labels);
result.put(a.getNominalAttributeID(), taggerAttribute);
} else {
TaggerAttribute taggerAttribute = result.get(a
.getNominalAttributeID());
List<TaggerLabel> labels = taggerAttribute
.getNominalLabelCollection();
TaggerLabel label = new TaggerLabel(a.getLabelName(),
a.getLabelID());
labels.add(label);
}
}
logger.info("Created attributes collection of size = " + result.size());
//logger.info(result);
return result.values();
}
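	/**
	 * Reads the "sampleCountThreshold" field from the JSON returned by
	 * getRetainingThreshold(); falls back to a default of 50 when the call or
	 * the parsing fails.
	 */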
private int getCurrentRetrainingThreshold() throws Exception {
try {
String retrainingThreshold = this.getRetainingThreshold();
ObjectMapper mapper = JacksonWrapper.getObjectMapper();
JsonFactory factory = mapper.getJsonFactory(); // since 2.1 use
// mapper.getFactory()
// instead
JsonParser jp = factory.createJsonParser(retrainingThreshold);
JsonNode actualObj = mapper.readTree(jp);
JsonNode nameNode = actualObj.get("sampleCountThreshold");
int sampleCountThreshold = Integer.parseInt(nameNode.asText());
return sampleCountThreshold;
} catch (Exception e) {
logger.error("Exception while getting CurrentRetrainingThreshold", e);
return 50;
}
}
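	/**
	 * Resolves the crisis and attribute codes for the given model family and
	 * asks the crowdsourcing API to delete the matching Pybossa client app;
	 * failures are logged and swallowed so classifier removal can continue.
	 */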
private void deletePybossaApp(Integer modelFamilyID) {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
//System.out.print("removeAttributeFromCrises: starting ......................................");
WebTarget webResource = client.target(taggerMainUrl
+ "/modelfamily/" + modelFamilyID);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper
.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
Response resp = webResource.request(MediaType.APPLICATION_JSON)
.get();
String jsonResp = resp.readEntity(String.class);
TaggerModelFamily tm = objectMapper.readValue(jsonResp,
TaggerModelFamily.class);
if (tm != null && tm.getCrisis() != null
&& tm.getNominalAttribute() != null) {
String crisisCode = tm.getCrisis().getCode();
String attributeCode = tm.getNominalAttribute().getCode();
logger.info("crisisCode: " + crisisCode + " attributeCode: " + attributeCode);
WebTarget webResp = client.target(crowdsourcingAPIMainUrl
+ "/clientapp/delete/" + crisisCode + "/"
+ attributeCode);
				Response clientResp = webResp.request(
						MediaType.APPLICATION_JSON).get();
//logger.info("deactivated - clientResponse : " + clientResp);
} else {
logger.info("No modelfamily found for id = " + modelFamilyID);
}
} catch (Exception e) {
			logger.error("deletePybossaApp failed for modelFamilyID: " + modelFamilyID, e);
}
}
@Override
public String getAttributesAndLabelsByCrisisId(Integer id)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// http://example.com:8084/AIDRTrainerAPI/rest/crisis/id/1234
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/crisis/id/" + id);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
			logger.info("getAttributesAndLabelsByCrisisId - clientResponse : "
					+ clientResponse);
String jsonResponse = clientResponse.readEntity(String.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Error while getting all nominal attributes and their labels for a given crisisID: "+id, e);
throw new AidrException(
"Error while getting all nominal attributes and their labels for a given crisisID",
e);
}
}
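	/**
	 * Trashes a collection in two steps: the crisis is marked TRASHED in the
	 * aidr-predict database and, when a crisis record existed, the matching
	 * micromapper tasks are also removed from the aidr-scheduler database.
	 */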
@Override
public int trashCollection(Collection collection) throws Exception {
int retVal = 0;
Long crisisID = -1L;
//logger.info("[trashCollection] request received for collection: "
//+ collection.getCode());
// First clean up the aidr-predict database of documents
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client
.target(taggerMainUrl + "/manage/collection/trash/crisis/" + collection.getCode());
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("[trashCollection] response from tagger-api: "
//+ jsonResponse);
if (jsonResponse != null && jsonResponse.contains("TRASHED")) {
retVal = 1;
crisisID = Long.parseLong(jsonResponse.substring(
jsonResponse.indexOf(":") + 1,
jsonResponse.indexOf("}")));
} else {
retVal = 0;
}
} catch (Exception e) {
			logger.error("Error while attempting to trash collection: " + collection.getCode(), e);
throw new AidrException(
"Error while deleting a collection from aidr-predict database",
e);
}
//logger.info("[trashCollection] result of cleaning aidr-predict: "
//+ crisisID);
if (retVal > 0 && crisisID < 0) {
return 1; // crisis does not exist in aidr_predict table. Reason: no
// classifier attached
}
if (retVal > 0 && crisisID > 0) {
// Final DB task - cleanup the aidr-scheduler database of
// micromapper tasks
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(crowdsourcingAPIMainUrl
+ "/clientapp/delete/crisis/" + crisisID);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("[trashCollection] response from trainer-api: "
//+ jsonResponse);
if (jsonResponse != null
&& jsonResponse.equalsIgnoreCase("{\"status\":200}")) {
					logger.info("[trashCollection] Success in trashing " + collection.getCode());
return 1;
} else {
return 0;
}
} catch (Exception e) {
				logger.error("Error while attempting trash REST call for aidr_scheduler for crisisID: " + crisisID, e);
throw new AidrException(
"Error while attempting trash REST call for aidr_scheduler",
e);
}
}
return 0;
}
@Override
public int untrashCollection(String collectionCode) throws Exception {
//System.out.println("[untrashCollection] request received for collection: "+ collectionCode);
try {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
WebTarget webResource = client.target(taggerMainUrl
+ "/manage/collection/untrash/crisis/" + collectionCode);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
//System.out.println("[untrashCollection] response from tagger-api: "
// + jsonResponse);
if (jsonResponse != null
&& jsonResponse
.equalsIgnoreCase("{\"status\": \"UNTRASHED\"}")) {
				logger.info("[untrashCollection] Success in untrashing "
						+ collectionCode);
return 1;
} else {
return 0;
}
} catch (Exception e) {
logger.error("Error while attempting /untrash REST call for collection: "+collectionCode, e);
throw new AidrException(
"Error while attempting /untrash REST call", e);
}
}
@Override
public String loadLatestTweetsWithCount(String code, int count)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
String constraints = "{\"constraints\":[]}";
WebTarget webResource = client.target(outputAPIMainUrl
+ "/crisis/fetch/channel/filter/" + code + "?count="
+ count);
/*System.out.println("[loadLatestTweetsWithCount] Invoking: "
+ outputAPIMainUrl + "/crisis/fetch/channel/filter/" + code
+ "?count=" + count);
System.out.println("[loadLatestTweetsWithCount] constraints: "
+ constraints);*/
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(constraints),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
if (jsonResponse != null
&& (jsonResponse.startsWith("{") || jsonResponse
.startsWith("["))) {
/*System.out
.println("[loadLatestTweetsWithCount] jsonResponse for collection "
+ code + ": " + jsonResponse);*/
return jsonResponse;
} else {
/*System.out
.println("[loadLatestTweetsWithCount] jsonResponse for collection "
+ code + ": \"\"");*/
return "";
}
} catch (Exception e) {
logger.error("Error while loadLatestTweetsWithCount for collection: "+code, e);
throw new AidrException("Error while loadLatestTweetsWithCount", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisID(
Long crisisID, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "?count="
+ count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisCode(
String crisisCode, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisCode/" + crisisCode + "?count="
+ count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisis code = "
+ crisisCode + " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisis code = "
+ crisisCode + " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisIDUserID(
Long crisisID, Long userID, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "/userID/"
+ userID + "?count=" + count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + ", userId = " + userID
+ " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + ", userId = " + userID
+ " from Tagger", e);
}
}
@Override
public TaggerResponseWrapper getHumanLabeledDocumentsByCrisisIDUserName(
Long crisisID, String userName, Integer count) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
// Rest call to Tagger
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/humanLabeled/crisisID/" + crisisID + "/userName/"
+ userName + "?count=" + count);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
String jsonResponse = clientResponse.readEntity(String.class);
TaggerResponseWrapper dtoList = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ (dtoList.getHumanLabeledItems() != null ? dtoList
.getHumanLabeledItems().size() : 0));
return dtoList;
} catch (Exception e) {
logger.error("Error while getting all human labeled documents for crisisID = "
+ crisisID + ", user name = " + userName
+ " from Tagger", e);
throw new AidrException(
"Error while getting all human labeled documents for crisisID = "
+ crisisID + ", user name = " + userName
+ " from Tagger", e);
}
}
@Override
public Map<String, Object> downloadHumanLabeledDocumentsByCrisisUserName(
String queryString, String crisisCode, String userName,
Integer count, String fileType, String contentType)
throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
String targetURL = taggerMainUrl
+ "/misc/humanLabeled/download/crisis/" + crisisCode
+ "/userName/" + userName + "?count=" + count
+ "&fileType=" + fileType + "&contentType=" + contentType;
logger.info("Going to invoke REST API: " + targetURL + " POST body: " + queryString);
try {
// Rest call to Tagger
WebTarget webResource = client.target(targetURL);
ObjectMapper objectMapper = JacksonWrapper.getObjectMapper();
objectMapper.configure(
DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES,
false);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).post(Entity.json(queryString),
Response.class);
String jsonResponse = clientResponse.readEntity(String.class);
//logger.info("Response = " + jsonResponse);
TaggerResponseWrapper response = objectMapper.readValue(
jsonResponse, TaggerResponseWrapper.class);
logger.info("Number of human labeled documents returned by Tagger: "
+ response.getTotal());
Map<String, Object> retVal = new HashMap<String, Object>();
retVal.put("fileName", response.getMessage());
retVal.put("total", response.getTotal());
return retVal;
} catch (Exception e) {
logger.error("Error while getting download link for human labeled documents for crisis code = "
+ crisisCode+ ", user name = "+ userName+ " from Tagger", e);
throw new AidrException("Error while getting download link for human labeled documents for crisis code = "
+ crisisCode+ ", user name = "+ userName+ " from Tagger", e);
}
}
@Override
public Map<String, Object> updateMicromapperEnabled(String code, Boolean isMicromapperEnabled) throws AidrException {
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/crisis/update/micromapperEnabled/" + code +"/"+isMicromapperEnabled);
Response clientResponse = webResource.request(
MediaType.APPLICATION_JSON).get();
Map<String, Object> jsonResponse = clientResponse.readEntity(Map.class);
return jsonResponse;
} catch (Exception e) {
logger.error("Exception while updating isMicromapperEnabled ", e);
throw new AidrException("Exception while updating isMicromapperEnabled ",
e);
}
}
@Override
public Boolean sendMailService(String subject, String body){
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
Response clientResponse = null;
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/misc/sendEmail");
Form form = new Form();
form.param("subject", subject);
form.param("body", body);
clientResponse = webResource.request().post(
Entity.entity(form,MediaType.APPLICATION_FORM_URLENCODED),Response.class);
if (clientResponse.getStatus() != 200) {
logger.warn("Couldn't contact AIDRTaggerAPI for sending error message");
return false;
}
} catch (Exception e) {
			logger.error("Error in contacting AIDRTaggerAPI: " + clientResponse, e);
return false;
}
return true;
}
@Override
public Long getLabelCount(Long collectionId) {
Long labelCount = 0L;
Client client = ClientBuilder.newBuilder()
.register(JacksonFeature.class).build();
Response clientResponse = null;
try {
WebTarget webResource = client.target(taggerMainUrl
+ "/label/collection/" + collectionId);
			clientResponse = webResource.request(
					MediaType.APPLICATION_JSON).get();
			if (clientResponse.getStatus() != 200) {
				logger.warn("Couldn't contact AIDRTaggerAPI for fetching the label count");
			} else {
				labelCount = clientResponse.readEntity(Long.class);
			}
} catch (Exception e) {
logger.error("Error in contacting AIDRTaggerAPI: " + clientResponse, e);
}
return labelCount;
}
}
| Commit for status code issue
| aidr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java | Commit for status code issue | <ide><path>idr-manager/src/main/java/qa/qcri/aidr/manager/service/impl/TaggerServiceImpl.java
<ide> import org.codehaus.jackson.map.annotate.JsonSerialize;
<ide> import org.glassfish.jersey.jackson.JacksonFeature;
<ide> import org.springframework.beans.factory.annotation.Autowired;
<add>import org.springframework.http.HttpStatus;
<ide> import org.springframework.stereotype.Service;
<ide>
<ide> import qa.qcri.aidr.common.code.JacksonWrapper;
<ide> //logger.info("saveTaskAnswer - response status : "
<ide> // + clientResponse.getStatus());
<ide>
<del> return clientResponse.getStatus() == 204;
<add> return clientResponse.getStatus() == 200
<add> || clientResponse.getStatus() == 204;
<ide> } catch (Exception e) {
<ide> logger.error("Error while saving TaskAnswer in AIDRCrowdsourcing", e);
<ide> return true; |
|
Java | agpl-3.0 | 4208423946bebb4bbaf8e1dd813ab1f42fad2bc5 | 0 | podd/podd-redesign,podd/podd-redesign,podd/podd-redesign,podd/podd-redesign | /**
*
*/
package com.github.podd.restlet;
import java.util.ArrayList;
import java.util.List;
import org.openrdf.model.URI;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.restlet.security.Role;
import com.github.ansell.restletutils.RestletUtilRole;
/**
* The Roles available for PODD users.
*
* @author Peter Ansell [email protected]
*
* Copied from http://github.com/ansell/restlet-utils
*/
public enum PoddRoles implements RestletUtilRole
{
ADMIN("Administrator", "A repository administrator of the PODD System",
"http://purl.org/podd/ns/poddUser#RoleAdministrator", true),
PROJECT_CREATOR("Project Creator", "A User who can create new projects",
"http://purl.org/podd/ns/poddUser#RoleProjectCreator", true),
PROJECT_MEMBER("Project Member", "A user who is a member of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectMember", true),
PROJECT_OBSERVER("Project Observer", "A user who is an observer of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectObserver", true),
PROJECT_ADMIN("Project Administrator", "A user who is an administrator of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectAdministrator", true),
;
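    /**
     * Resolves a role from its display name (for example "Administrator");
     * returns null when no PODD role has that name.
     */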
public static RestletUtilRole getRoleByName(final String name)
{
for(final RestletUtilRole nextRole : PoddRoles.values())
{
if(nextRole.getName().equals(name))
{
return nextRole;
}
}
return null;
}
public static RestletUtilRole getRoleByUri(final URI nextUri)
{
for(final RestletUtilRole nextRole : PoddRoles.values())
{
if(nextRole.getURI().equals(nextUri))
{
return nextRole;
}
}
return null;
}
public static List<Role> getRoles()
{
final List<Role> result = new ArrayList<Role>(PoddRoles.values().length);
for(final RestletUtilRole nextRole : PoddRoles.values())
{
// WARNING: After Restlet-2.1RC5 Roles will only be considered equal if they are the
// same java object, so this must not create a new Role each time
result.add(nextRole.getRole());
}
return result;
}
private final Role role;
private final URI uri;
private final boolean isAssignable;
/**
* Constructor
*
* @param roleName
* @param description
* @param uriString
*/
PoddRoles(final String roleName, final String description, final String uriString, final boolean isAssignable)
{
this.role = new Role(roleName, description);
this.uri = ValueFactoryImpl.getInstance().createURI(uriString);
this.isAssignable = isAssignable;
}
/**
* @return the description
*/
@Override
public String getDescription()
{
return this.role.getDescription();
}
/**
* @return the name
*/
@Override
public String getName()
{
return this.role.getName();
}
@Override
public Role getRole()
{
return this.role;
}
@Override
public URI getURI()
{
return this.uri;
}
@Override
public boolean isAssignable()
{
return this.isAssignable;
}
}
| webapp/api/src/main/java/com/github/podd/restlet/PoddRoles.java | /**
*
*/
package com.github.podd.restlet;
import java.util.ArrayList;
import java.util.List;
import org.openrdf.model.URI;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.restlet.security.Role;
import com.github.ansell.restletutils.RestletUtilRole;
/**
* The Roles available for PODD users.
*
* @author Peter Ansell [email protected]
*
* Copied from http://github.com/ansell/restlet-utils
*/
public enum PoddRoles implements RestletUtilRole
{
ADMIN("Administrator", "A repository administrator of the PODD System",
"http://purl.org/podd/ns/poddUser#RoleAdministrator", true),
PROJECT_CREATOR("Project Creator", "A User who can create new projects",
"http://purl.org/podd/ns/poddUser#RoleProjectCreator", true),
PROJECT_MEMBER("Project member", "A user who is a member of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectMember", true),
PROJECT_OBSERVER("Project observer", "A user who is an observer of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectObserver", true),
PROJECT_ADMIN("Project Administrator", "A user who is an administrator of a particular project",
"http://purl.org/podd/ns/poddUser#RoleProjectAdministrator", true),
;
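    /**
     * Resolves a role from its display name (for example "Administrator");
     * returns null when no PODD role has that name.
     */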
public static RestletUtilRole getRoleByName(final String name)
{
for(final RestletUtilRole nextRole : PoddRoles.values())
{
if(nextRole.getName().equals(name))
{
return nextRole;
}
}
return null;
}
public static RestletUtilRole getRoleByUri(final URI nextUri)
{
for(final RestletUtilRole nextRole : PoddRoles.values())
{
if(nextRole.getURI().equals(nextUri))
{
return nextRole;
}
}
return null;
}
public static List<Role> getRoles()
{
final List<Role> result = new ArrayList<Role>(PoddRoles.values().length);
for(final RestletUtilRole nextRole : PoddRoles.values())
{
// WARNING: After Restlet-2.1RC5 Roles will only be considered equal if they are the
// same java object, so this must not create a new Role each time
result.add(nextRole.getRole());
}
return result;
}
private final Role role;
private final URI uri;
private final boolean isAssignable;
/**
* Constructor
*
* @param roleName
* @param description
* @param uriString
*/
PoddRoles(final String roleName, final String description, final String uriString, final boolean isAssignable)
{
this.role = new Role(roleName, description);
this.uri = ValueFactoryImpl.getInstance().createURI(uriString);
this.isAssignable = isAssignable;
}
/**
* @return the description
*/
@Override
public String getDescription()
{
return this.role.getDescription();
}
/**
* @return the name
*/
@Override
public String getName()
{
return this.role.getName();
}
@Override
public Role getRole()
{
return this.role;
}
@Override
public URI getURI()
{
return this.uri;
}
@Override
public boolean isAssignable()
{
return this.isAssignable;
}
}
| Adjust Role names | webapp/api/src/main/java/com/github/podd/restlet/PoddRoles.java | Adjust Role names | <ide><path>ebapp/api/src/main/java/com/github/podd/restlet/PoddRoles.java
<ide> PROJECT_CREATOR("Project Creator", "A User who can create new projects",
<ide> "http://purl.org/podd/ns/poddUser#RoleProjectCreator", true),
<ide>
<del> PROJECT_MEMBER("Project member", "A user who is a member of a particular project",
<add> PROJECT_MEMBER("Project Member", "A user who is a member of a particular project",
<ide> "http://purl.org/podd/ns/poddUser#RoleProjectMember", true),
<ide>
<del> PROJECT_OBSERVER("Project observer", "A user who is an observer of a particular project",
<add> PROJECT_OBSERVER("Project Observer", "A user who is an observer of a particular project",
<ide> "http://purl.org/podd/ns/poddUser#RoleProjectObserver", true),
<ide>
<ide> PROJECT_ADMIN("Project Administrator", "A user who is an administrator of a particular project", |
|
Java | lgpl-2.1 | fe21259167417ebf93360e9b62d5d6406a88819e | 0 | pentaho/pentaho-commons-xul | package org.pentaho.ui.xul.swt.tags;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.components.XulMenuitem;
import org.pentaho.ui.xul.dom.Element;
import org.pentaho.ui.xul.swt.SwtElement;
public class SwtMenuitem extends SwtElement implements XulMenuitem{
private static final Log logger = LogFactory.getLog(SwtMenuitem.class);
private String onCommand;
private boolean disabled = false;
private MenuItem item;
public SwtMenuitem(Element self, XulComponent parent, XulDomContainer domContainer, String tagName) {
super("menuitem");
setManagedObject("empty");
if(parent.getManagedObject() != null && parent.getManagedObject() instanceof Menu){
item = new MenuItem((Menu) parent.getManagedObject(), SWT.PUSH);
item.addSelectionListener(new SelectionAdapter(){
@Override
public void widgetSelected(SelectionEvent arg0) {
String command = SwtMenuitem.this.onCommand;
if(command != null){
invoke(command);
}
}
});
}
}
private String acceltext = "";
private String accesskey = "";
private String label = "";
private String image = "";
private boolean selected = false;
public String getAcceltext() {
return acceltext;
}
public String getAccesskey() {
return accesskey;
}
public boolean isDisabled() {
return disabled;
}
public String getLabel() {
return label;
}
public void setAcceltext(String accel) {
this.acceltext = accel;
setText();
}
private void setText(){
if(item != null){
String text = "";
if(this.label != null){
text += this.label;
}
text += "\t"+acceltext;
item.setText(text);
}
}
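  // Parses access keys of the form "ctrl-alt-n": the modifier tokens map to
  // SWT mask bits and the single remaining character becomes the accelerator
  // key, e.g. "ctrl-shift-s" yields SWT.CTRL + SWT.SHIFT + 'S'.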
public void setAccesskey(String accessKey) {
if(item != null){
int mask = 0;
if(accessKey.indexOf("ctrl") > -1){
mask += SWT.CTRL;
}
if(accessKey.indexOf("shift") > -1){
mask += SWT.SHIFT;
}
if(accessKey.indexOf("alt") > -1){
mask += SWT.ALT;
}
String remainder = accessKey.replaceAll("ctrl", "").replaceAll("shift", "").replaceAll("alt", "").replaceAll("-", "").trim();
if(remainder.length() == 1){
mask += remainder.toUpperCase().charAt(0);
}
item.setAccelerator(mask);
}
}
public void setDisabled(boolean disabled) {
this.disabled = disabled;
if (item != null) {
item.setEnabled(!disabled);
}
}
public void setDisabled(String disabled) {
setDisabled(Boolean.parseBoolean(disabled));
}
public void setLabel(String label) {
this.label = label;
setText();
}
public String getImage() {
return image;
}
public boolean isSelected() {
return selected;
}
public void setSelected(String selected){
this.selected = Boolean.parseBoolean(selected);
}
public void setSelected(boolean val){
selected = val;
}
public void setImage(String image) {
this.image = image;
}
public String getCommand() {
return this.onCommand;
}
public void setCommand(final String command) {
this.onCommand = command;
}
public String toString(){
return this.getLabel();
}
}
| pentaho-xul-swt/src/org/pentaho/ui/xul/swt/tags/SwtMenuitem.java | package org.pentaho.ui.xul.swt.tags;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulDomContainer;
import org.pentaho.ui.xul.components.XulMenuitem;
import org.pentaho.ui.xul.dom.Element;
import org.pentaho.ui.xul.swt.SwtElement;
public class SwtMenuitem extends SwtElement implements XulMenuitem{
private static final Log logger = LogFactory.getLog(SwtMenuitem.class);
private String onCommand;
private boolean disabled = false;
private MenuItem item;
public SwtMenuitem(Element self, XulComponent parent, XulDomContainer domContainer, String tagName) {
super("menuitem");
setManagedObject("empty");
if(parent.getManagedObject() != null && parent.getManagedObject() instanceof Menu){
item = new MenuItem((Menu) parent.getManagedObject(), SWT.PUSH);
item.addSelectionListener(new SelectionAdapter(){
@Override
public void widgetSelected(SelectionEvent arg0) {
String command = SwtMenuitem.this.onCommand;
if(command != null){
invoke(command);
}
}
});
}
}
private String acceltext = "";
private String accesskey = "";
private String label = "";
private String image = "";
private boolean selected = false;
public String getAcceltext() {
return acceltext;
}
public String getAccesskey() {
return accesskey;
}
public boolean isDisabled() {
return disabled;
}
public String getLabel() {
return label;
}
public void setAcceltext(String accel) {
this.acceltext = accel;
setText();
}
private void setText(){
if(item != null){
String text = "";
if(this.label != null){
text += this.label;
}
text += "\t"+acceltext;
item.setText(text);
}
}
public void setAccesskey(String accessKey) {
if(item != null){
int mask = 0;
if(accessKey.indexOf("ctrl") > -1){
mask += SWT.CTRL;
}
if(accessKey.indexOf("shift") > -1){
mask += SWT.SHIFT;
}
if(accessKey.indexOf("alt") > -1){
mask += SWT.ALT;
}
String remainder = accessKey.replaceAll("ctrl", "").replaceAll("shift", "").replaceAll("alt", "").replaceAll("-", "").trim();
if(remainder.length() == 1){
mask += remainder.toUpperCase().charAt(0);
}
item.setAccelerator(mask);
}
}
public void setDisabled(boolean disabled) {
this.disabled = disabled;
}
public void setDisabled(String disabled) {
this.disabled = Boolean.parseBoolean(disabled);
}
public void setLabel(String label) {
this.label = label;
setText();
}
public String getImage() {
return image;
}
public boolean isSelected() {
return selected;
}
public void setSelected(String selected){
this.selected = Boolean.parseBoolean(selected);
}
public void setSelected(boolean val){
selected = val;
}
public void setImage(String image) {
this.image = image;
}
public String getCommand() {
return this.onCommand;
}
public void setCommand(final String command) {
this.onCommand = command;
}
public String toString(){
return this.getLabel();
}
}
| Fixed so that it respects enable/disable
| pentaho-xul-swt/src/org/pentaho/ui/xul/swt/tags/SwtMenuitem.java | Fixed so that it respects enable/disable | <ide><path>entaho-xul-swt/src/org/pentaho/ui/xul/swt/tags/SwtMenuitem.java
<ide> }
<ide> public void setDisabled(boolean disabled) {
<ide> this.disabled = disabled;
<add> if (item != null) {
<add> item.setEnabled(!disabled);
<add> }
<ide> }
<ide>
<ide> public void setDisabled(String disabled) {
<del> this.disabled = Boolean.parseBoolean(disabled);
<add> setDisabled(Boolean.parseBoolean(disabled));
<ide> }
<ide>
<ide> public void setLabel(String label) { |
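The change above makes setDisabled(boolean) forward the flag to the wrapped SWT MenuItem and routes the String overload through it, so disabling via the XUL API now actually greys the widget out. Below is a minimal usage sketch under that assumption, not part of the commit: the SwtMenuitem argument is assumed to have been created by the XUL loader from a <menuitem> element, and only the setDisabled calls are the point.

import org.pentaho.ui.xul.swt.tags.SwtMenuitem;

// Hedged sketch (hypothetical helper, not from the commit above).
public final class MenuItemDisableSketch {
    private MenuItemDisableSketch() {}

    public static void setSaveEnabled(SwtMenuitem saveItem, boolean enabled) {
        // With the fix, this now reaches MenuItem.setEnabled(!disabled) on the SWT widget.
        saveItem.setDisabled(!enabled);
    }

    public static void disableFromXulAttribute(SwtMenuitem item) {
        // The String overload delegates to setDisabled(boolean) instead of only
        // updating the internal 'disabled' field.
        item.setDisabled("true");
    }
}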
|
JavaScript | apache-2.0 | 7f6ad0107326a225fbe0579cbc4907aed6de4d17 | 0 | werneckpaiva/retrato-js | var Settings = {
URL_PREFIX: "/",
URL_DATA_PREFIX: "/api/"
};
var StringUtil = {
sanitizeUrl: function(url){
url = url.replace(/([^:])[\/]+/g, '$1/');
return url;
},
humanizeName: function(name){
name = name.replace(/_/g, " ");
name = name.replace(/\.jpe?g/i, "");
return name;
}
};
var Fullscreen = {
open: function(element){
if (element.requestFullscreen) {
element.requestFullscreen();
} else if (element.webkitRequestFullscreen) {
element.webkitRequestFullscreen();
} else if (element.mozRequestFullScreen) {
element.mozRequestFullScreen();
} else if (element.msRequestFullscreen) {
element.msRequestFullscreen();
}
},
close: function close() {
if (document.exitFullscreen) {
document.exitFullscreen();
} else if (document.webkitExitFullscreen) {
document.webkitExitFullscreen();
} else if (document.mozCancelFullScreen) {
document.mozCancelFullScreen();
} else if (document.msExitFullscreen) {
document.msExitFullscreen();
}
},
onchange: function(handler){
document.addEventListener("fullscreenchange", handler);
document.addEventListener("webkitfullscreenchange", handler);
document.addEventListener("mozfullscreenchange", handler);
document.addEventListener("MSFullscreenChange", handler);
},
isActive: function(){
return document.fullscreenElement ||
document.mozFullScreenElement ||
document.webkitFullscreenElement ||
document.msFullscreenElement;
}
};
function AlbumMenu(model, conf){
var self = this;
var $view = null;
var $detailsButton = null;
var $fullscreenButton = null;
function init(){
$view = conf.view;
$detailsButton = conf.detailsButton;
$fullscreenButton = conf.fullscreenButton;
watch(model, "selectedPictureIndex", function(){
pinMenu();
showHideDetailsButton();
showHideFullscreenButton();
});
$detailsButton.click(function(event){
event.preventDefault();
showHideDetails();
});
$fullscreenButton.click(function(event){
event.preventDefault();
openCloseFullscreen();
});
Fullscreen.onchange(function(event){
$fullscreenButton.toggleClass("selected", Fullscreen.isActive());
});
showHideDetailsButton();
showHideFullscreenButton();
}
function pinMenu(){
if (model.selectedPictureIndex !== null){
$view.addClass("headroom--pinned").addClass("headroom--top");
$view.removeClass("headroom--not-top").removeClass("headroom--unpinned");
}
}
function showHideDetailsButton(){
if (model.selectedPictureIndex === null){
model.detailsOn = false;
$detailsButton.hide();
} else {
$detailsButton.show();
}
}
function showHideDetails(){
model.detailsOn = !model.detailsOn;
$detailsButton.toggleClass("selected", model.detailsOn);
}
function showHideFullscreenButton(){
if (model.selectedPictureIndex === null){
model.detailsOn = false;
$fullscreenButton.hide();
} else {
$fullscreenButton.show();
}
}
function openCloseFullscreen(){
if (Fullscreen.isActive()){
Fullscreen.close();
} else {
Fullscreen.open(document.getElementById("content"));
}
}
init();
}
function AlbumPhotos(model, conf){
var self = this;
var $view = null;
var $viewList = null;
var template = null;
var currentWidth = 0;
var heightProportion = null;
var lazyLoad = false;
var margin = 0;
function init(){
setConfiguration();
watch(model, "pictures", function(prop, action, newvalue, oldvalue){
var picturesChanged = Array.isArray(newvalue);
self.displayPictures(picturesChanged);
});
$(window).resize(function(){
self.resizePictures();
});
}
function setConfiguration(){
// Required
$view = conf.view;
template = conf.template;
// Optional
$viewList = (conf.listClass)? $view.find("."+conf.listClass) : $view;
heightProportion = (conf.heightProportion)? conf.heightProportion : 0.45;
lazyLoad = (conf.lazyLoad)? conf.lazyLoad : false;
margin = (conf.margin)? conf.margin : 0;
}
this.displayPictures = function(picturesChanged){
if (picturesChanged===false){
return;
}
$viewList.empty();
if(!model.pictures || model.pictures.length === 0){
$view.hide();
return;
}
$view.show();
var resize = new Resize(model.pictures, heightProportion);
currentWidth = $view.width();
var newPictures = resize.doResize(currentWidth, $(window).height());
var content = "";
for (var i=0; i<newPictures.length; i++){
var p = newPictures[i];
var params = {
width: p.newWidth-margin,
height: p.newHeight-margin
};
if (!lazyLoad){
params.src = model.pictures[i].thumb;
}
content += Mustache.render(template, params);
}
$viewList.html(content);
$viewList.find("img")
.each(function(i, el){
$(el).data("index", i);
})
.click(function(){
model.selectedPictureIndex = $(this).data("index");
});
if (lazyLoad){
startLazyLoading();
}
};
this.resizePictures = function(){
var newWidth = $view.width();
if (newWidth == currentWidth) return;
currentWidth = $view.width();
var resize = new Resize(model.pictures, heightProportion);
var newPictures = resize.doResize(currentWidth, $(window).height());
$viewList.children().each(function(index, item){
var p = newPictures[index];
var width = (p.newWidth-margin);
var height = (p.newHeight-margin);
$(this).css("width", width).css("height", height);
$(this).find("img").attr("width", width).attr("height", height);
});
};
function startLazyLoading(){
function loadNextPicture(){
if (index >= model.pictures.length){
return;
}
if (image.src == model.pictures[index].thumb){
index++;
loadNextPicture();
} else {
image.src = model.pictures[index].thumb;
}
}
var index = 0;
var image = new Image();
image.onload = function(){
$viewList.find("img:eq("+index+")")
.attr("src", this.src)
.show();
index++;
loadNextPicture();
};
loadNextPicture();
}
init();
} | src/album.js | var Settings = {
URL_PREFIX: "/",
URL_DATA_PREFIX: "/api/"
};
var StringUtil = {
sanitizeUrl: function(url){
url = url.replace(/([^:])[\/]+/g, '$1/');
return url;
},
humanizeName: function(name){
name = name.replace(/_/g, " ");
name = name.replace(/\.jpe?g/i, "");
return name;
}
};
var Fullscreen = {
open: function(element){
if (element.requestFullscreen) {
element.requestFullscreen();
} else if (element.webkitRequestFullscreen) {
element.webkitRequestFullscreen();
} else if (element.mozRequestFullScreen) {
element.mozRequestFullScreen();
} else if (element.msRequestFullscreen) {
element.msRequestFullscreen();
}
},
close: function close() {
if (document.exitFullscreen) {
document.exitFullscreen();
} else if (document.webkitExitFullscreen) {
document.webkitExitFullscreen();
} else if (document.mozCancelFullScreen) {
document.mozCancelFullScreen();
} else if (document.msExitFullscreen) {
document.msExitFullscreen();
}
},
onchange: function(handler){
document.addEventListener("fullscreenchange", handler);
document.addEventListener("webkitfullscreenchange", handler);
document.addEventListener("mozfullscreenchange", handler);
document.addEventListener("MSFullscreenChange", handler);
},
isActive: function(){
return document.fullscreenElement ||
document.mozFullScreenElement ||
document.webkitFullscreenElement ||
document.msFullscreenElement;
}
};
function AlbumMenu(model, conf){
var self = this;
var $view = null;
var $detailsButton = null;
var $fullscreenButton = null;
function init(){
$view = conf.view;
$detailsButton = conf.detailsButton;
$fullscreenButton = conf.fullscreenButton;
watch(model, "selectedPictureIndex", function(){
pinMenu();
showHideDetailsButton();
showHideFullscreenButton();
});
$detailsButton.click(function(event){
event.preventDefault();
showHideDetails();
});
$fullscreenButton.click(function(event){
event.preventDefault();
openCloseFullscreen();
});
Fullscreen.onchange(function(event){
console.log('fullscreen change')
$fullscreenButton.toggleClass("selected", Fullscreen.isActive());
});
showHideDetailsButton();
showHideFullscreenButton();
}
function pinMenu(){
if (model.selectedPictureIndex !== null){
$view.addClass("headroom--pinned").addClass("headroom--top");
$view.removeClass("headroom--not-top").removeClass("headroom--unpinned");
}
}
function showHideDetailsButton(){
if (model.selectedPictureIndex === null){
model.detailsOn = false;
$detailsButton.hide();
} else {
$detailsButton.show();
}
}
function showHideDetails(){
model.detailsOn = !model.detailsOn;
$detailsButton.toggleClass("selected", model.detailsOn);
}
function showHideFullscreenButton(){
if (model.selectedPictureIndex === null){
model.detailsOn = false;
$fullscreenButton.hide();
} else {
$fullscreenButton.show();
}
}
function openCloseFullscreen(){
if (Fullscreen.isActive()){
Fullscreen.close();
} else {
Fullscreen.open(document.getElementById("content"));
}
}
init();
}
function AlbumPhotos(model, conf){
var self = this;
var $view = null;
var $viewList = null;
var template = null;
var currentWidth = 0;
var heightProportion = null;
var lazyLoad = false;
var margin = 0;
function init(){
setConfiguration();
watch(model, "pictures", function(prop, action, newvalue, oldvalue){
var picturesChanged = Array.isArray(newvalue);
self.displayPictures(picturesChanged);
});
$(window).resize(function(){
self.resizePictures();
});
}
function setConfiguration(){
// Required
$view = conf.view;
template = conf.template;
// Optional
$viewList = (conf.listClass)? $view.find("."+conf.listClass) : $view;
heightProportion = (conf.heightProportion)? conf.heightProportion : 0.45;
lazyLoad = (conf.lazyLoad)? conf.lazyLoad : false;
margin = (conf.margin)? conf.margin : 0;
}
this.displayPictures = function(picturesChanged){
if (picturesChanged===false){
return;
}
$viewList.empty();
if(!model.pictures || model.pictures.length === 0){
$view.hide();
return;
}
$view.show();
var resize = new Resize(model.pictures, heightProportion);
currentWidth = $view.width();
var newPictures = resize.doResize(currentWidth, $(window).height());
var content = "";
for (var i=0; i<newPictures.length; i++){
var p = newPictures[i];
var params = {
width: p.newWidth-margin,
height: p.newHeight-margin
};
if (!lazyLoad){
params.src = model.pictures[i].thumb;
}
content += Mustache.render(template, params);
}
$viewList.html(content);
$viewList.find("img")
.each(function(i, el){
$(el).data("index", i);
})
.click(function(){
model.selectedPictureIndex = $(this).data("index");
});
if (lazyLoad){
startLazyLoading();
}
};
this.resizePictures = function(){
var newWidth = $view.width();
if (newWidth == currentWidth) return;
currentWidth = $view.width();
var resize = new Resize(model.pictures, heightProportion);
var newPictures = resize.doResize(currentWidth, $(window).height());
$viewList.children().each(function(index, item){
var p = newPictures[index];
var width = (p.newWidth-margin);
var height = (p.newHeight-margin);
$(this).css("width", width).css("height", height);
$(this).find("img").attr("width", width).attr("height", height);
});
};
function startLazyLoading(){
function loadNextPicture(){
if (index >= model.pictures.length){
return;
}
if (image.src == model.pictures[index].thumb){
index++;
loadNextPicture();
} else {
image.src = model.pictures[index].thumb;
}
}
var index = 0;
var image = new Image();
image.onload = function(){
$viewList.find("img:eq("+index+")")
.attr("src", this.src)
.show();
index++;
loadNextPicture();
};
loadNextPicture();
}
init();
} | Fullscreen fixed
| src/album.js | Fullscreen fixed | <ide><path>rc/album.js
<ide> });
<ide>
<ide> Fullscreen.onchange(function(event){
<del> console.log('fullscreen change')
<ide> $fullscreenButton.toggleClass("selected", Fullscreen.isActive());
<ide> });
<ide> |
|
Java | mit | 92ae1cf362c130ad20eb861d08fab1b7c2161b89 | 0 | gems-uff/prov-viewer | /*
* The MIT License
*
* Copyright 2017 Kohwalter.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package br.uff.ic.utility;
import br.uff.ic.provviewer.VariableNames;
import br.uff.ic.provviewer.Vertex.ColorScheme.ColorScheme;
import br.uff.ic.utility.graph.ActivityVertex;
import br.uff.ic.utility.graph.AgentVertex;
import br.uff.ic.utility.graph.Edge;
import br.uff.ic.utility.graph.EntityVertex;
import br.uff.ic.utility.graph.GraphVertex;
import br.uff.ic.utility.graph.Vertex;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.Graph;
import edu.uci.ics.jung.graph.util.Pair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
/**
*
* @author Kohwalter
*/
public class GraphUtils {
public static Object hasAgentVertex(Object v) {
Object activity = null;
Object entity = null;
if (v instanceof Graph) {
for (Object vertex : ((Graph) v).getVertices()) {
if (vertex instanceof AgentVertex) {
return vertex;
} else if (vertex instanceof Graph) {
return hasAgentVertex(vertex);
} else if (vertex instanceof ActivityVertex) {
activity = vertex;
} else if (vertex instanceof EntityVertex) {
entity = vertex;
}
}
} else {
return v;
}
if (activity != null) {
return activity;
} else {
return entity;
}
}
public static int getCollapsedVertexSize(Object v) {
int graphSize = 0;
if (v instanceof GraphVertex) {
for (Object vertex : ((GraphVertex) v).clusterGraph.getVertices()) {
if (vertex instanceof GraphVertex) {
graphSize = graphSize + getCollapsedVertexSize(vertex);
} else {
graphSize++;
}
}
}
return graphSize;
//int graphSize = ((Graph) v).getVertexCount();
}
// TO DO: Get the mean of slopes if there are more than 1 vertex with the attribute
// TO DO: Allow for jumping vertices until finding the vertex with the same attribute (e.g., skip an entity between two activities)
public static float getSlope(Object node, ColorScheme colorScheme) {
double slope = Double.NEGATIVE_INFINITY;
for (Edge e : colorScheme.variables.graph.getOutEdges(node)) {
if (!((Vertex) e.getTarget()).getAttributeValue(colorScheme.attribute).contentEquals(VariableNames.UnknownValue)) {
float attValue = ((Vertex) node).getAttributeValueFloat(colorScheme.attribute) - ((Vertex) e.getTarget()).getAttributeValueFloat(colorScheme.attribute);
double time = ((Vertex) node).getTime() - ((Vertex) e.getTarget()).getTime();
if (time != 0) {
slope = attValue / time;
} else if ((attValue != 0) && (time == 0)) {
slope = attValue;
} else if (time == 0) {
slope = 0;
}
}
}
return (float) slope;
}
public static ArrayList<Float> getAttributeValuesFromVertices(DirectedGraph<Object, Edge> graph, String attribute) {
Collection<Object> nodes = graph.getVertices();
ArrayList<Float> values = new ArrayList<>();
for (Object node : nodes) {
if (!((Vertex) node).getAttributeValue(attribute).contentEquals(VariableNames.UnknownValue)) {
values.add(((Vertex) node).getAttributeValueFloat(attribute));
}
}
return values;
}
/**
* Method that returns a GraphVertex from the (Graph) v
* @param v is the Graph
* @return the GraphVertex
*/
public static GraphVertex CreateVertexGraph(Object v) {
Map<String, String> ids = new HashMap<>();
Map<String, GraphAttribute> attributes = new HashMap<>();
CreateVertexGraph(v, ids, attributes);
return new GraphVertex(ids, attributes, (Graph) v);
}
/**
* Method to update the attribute value that counts the number of vertices of the specified type
* @param attributes is the list of attributes already computed
* @param graphVertex is the current GraphVertex
* @param type is the type of vertices we are counting (Agents, Activities, Entities)
*/
private static void UpdateVertexTypeQuantityGraphVertex(Map<String, GraphAttribute> attributes, GraphVertex graphVertex, String type) {
if(attributes.containsKey(type)) {
int qnt = (int) Float.parseFloat(attributes.get(type).getValue());
qnt = (int) (qnt + ((Vertex)graphVertex).getAttributeValueFloat(type));
attributes.get(type).setValue(Integer.toString(qnt));
} else {
GraphAttribute att = new GraphAttribute(type, Integer.toString((int) ((Vertex)graphVertex).getAttributeValueFloat(type)));
attributes.put(att.getName(), att);
}
}
/**
* Method to update the attribute value that counts the number of vertices of the specified type
* @param attributes is the list of attributes already computed
* @param v is the current vertex
* @param type is the type of vertices we are counting (Agents, Activities, Entities)
*/
private static void UpdateVertexTypeQuantity(Map<String, GraphAttribute> attributes, Vertex v, String type) {
if(attributes.containsKey(type)) {
int qnt = (int) Float.parseFloat(attributes.get(type).getValue());
qnt++;
attributes.get(type).setValue(Integer.toString(qnt));
} else {
GraphAttribute att = new GraphAttribute(type, Integer.toString(1));
attributes.put(att.getName(), att);
}
}
/**
* Recursive method to generate the tooltip.
* It considers Graph vertices inside the collapsed vertex.
* @param v is the current vertex for the tooltip
* @param ids is all computed ids for the tooltip
* @param attributes is the attribute list for the tooltip
* @return
*/
public static void CreateVertexGraph(Object v,
Map<String, String> ids,
Map<String, GraphAttribute> attributes){
Collection vertices = ((Graph) v).getVertices();
for (Object vertex : vertices) {
if (!(vertex instanceof Graph))
{
ids.put(((Vertex) vertex).getID(), ((Vertex) vertex).getID());
if(vertex instanceof GraphVertex) {
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexAgentAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexAgentAttribute);
}
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexActivityAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexActivityAttribute);
}
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexEntityAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexEntityAttribute);
}
}else if (vertex instanceof AgentVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexAgentAttribute);
} else if (vertex instanceof ActivityVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexActivityAttribute);
} else if (vertex instanceof EntityVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexEntityAttribute);
}
for (GraphAttribute att : ((Vertex) vertex).getAttributes()) {
if (attributes.containsKey(att.getName())) {
if(!att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexAgentAttribute)
&& !att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexActivityAttribute)
&& !att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexEntityAttribute)) {
GraphAttribute temporary = attributes.get(att.getName());
temporary.updateAttribute(att.getAverageValue());
attributes.put(att.getName(), temporary);
}
} else {
attributes.put(att.getName(), new GraphAttribute(att.getName(), att.getAverageValue()));
}
}
}
else //(vertex instanceof Graph)
{
CreateVertexGraph(vertex, ids, attributes);
}
}
}
/**
* Method to return whether two vertices belong to the same type or whether the GraphVertex has vertices of the same type inside it
* @param v1 first vertex
* @param v2 second vertex
* @return true if they belong to the same type or if the graphVertex has vertices of the same type inside it
*/
public static boolean isSameVertexTypes(Vertex v1, Vertex v2) {
if (v1.getNodeType().equalsIgnoreCase(v2.getNodeType())) {
return true;
}
else if(v1 instanceof GraphVertex || v2 instanceof GraphVertex) {
if(v1 instanceof GraphVertex) {
if(v1.hasAttribute(VariableNames.CollapsedVertexActivityAttribute) && v2 instanceof ActivityVertex) {
return true;
}
if(v1.hasAttribute(VariableNames.CollapsedVertexAgentAttribute) && v2 instanceof AgentVertex) {
return true;
}
if(v1.hasAttribute(VariableNames.CollapsedVertexEntityAttribute) && v2 instanceof EntityVertex) {
return true;
}
}
else if(v2 instanceof GraphVertex) {
if(v2.hasAttribute(VariableNames.CollapsedVertexActivityAttribute) && v1 instanceof ActivityVertex) {
return true;
}
if(v2.hasAttribute(VariableNames.CollapsedVertexAgentAttribute) && v1 instanceof AgentVertex) {
return true;
}
if(v2.hasAttribute(VariableNames.CollapsedVertexEntityAttribute) && v1 instanceof EntityVertex) {
return true;
}
}
}
return false;
}
/**
* Method that calculates the probability of using this edge
* @param graph is the graph
* @param e is the edge that connects both vertices
* @param numberOfGraphs is the total number of graphs used during the graph merges
* @return the probability of using this edge
*/
public static float getSubPathProbability(Graph graph, Edge e, int numberOfGraphs) {
Pair endpoints = graph.getEndpoints(e);
Object target = endpoints.getSecond();
int sources = graph.getInEdges(target).size();
if(sources == 1)
return 1;
else
return e.getEdgeFrequencyValue(numberOfGraphs);
}
/**
* Breadth-first search algorithm that returns the minimum path if one exists
* @param source is the source vertex
* @param target is the destination vertex
* @param graph is the graph
* @return the edges connecting source to target, keyed by vertex ID, or null if no path exists
*/
public static Map<String, Edge> BFS(Vertex source, Vertex target, Graph graph)
{
Map<String, Boolean> visited = new HashMap<>();
Map<String, Edge> path = new HashMap<>();
LinkedList<Vertex> queue = new LinkedList<>();
LinkedList<Edge> edgeQueue = new LinkedList<>();
// Mark the current node as visited and enqueue it
visited.put(source.getID(), Boolean.TRUE);
queue.add(source);
while (!queue.isEmpty())
{
// Dequeue a vertex from queue and print it
source = queue.poll();
Edge next = edgeQueue.poll();
if(next != null)
path.put(source.getID(), next);
System.out.print(source.getID() + " -> ");
// Get all adjacent vertices of the dequeued vertex (via its in-edges).
// If an adjacent vertex has not been visited, mark it
// visited and enqueue it
Iterator<Object> i = graph.getInEdges(source).iterator();
while (i.hasNext())
{
Edge edge = (Edge) i.next();
Vertex n = (Vertex) edge.getSource();
if (!visited.containsKey(n.getID()))
{
visited.put(n.getID(), Boolean.TRUE);
queue.add(n);
edgeQueue.add(edge);
if(n.getID().equalsIgnoreCase(target.getID())) {
System.out.println("Reached target: " + n.getID());
path.put(n.getID(), edge);
return path;
}
}
}
}
System.out.println();
System.out.println("No path exists");
return null;
}
}
| src/main/java/br/uff/ic/utility/GraphUtils.java | /*
* The MIT License
*
* Copyright 2017 Kohwalter.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package br.uff.ic.utility;
import br.uff.ic.provviewer.VariableNames;
import br.uff.ic.provviewer.Vertex.ColorScheme.ColorScheme;
import br.uff.ic.utility.graph.ActivityVertex;
import br.uff.ic.utility.graph.AgentVertex;
import br.uff.ic.utility.graph.Edge;
import br.uff.ic.utility.graph.EntityVertex;
import br.uff.ic.utility.graph.GraphVertex;
import br.uff.ic.utility.graph.Vertex;
import edu.uci.ics.jung.graph.DirectedGraph;
import edu.uci.ics.jung.graph.Graph;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author Kohwalter
*/
public class GraphUtils {
public static Object hasAgentVertex(Object v) {
Object activity = null;
Object entity = null;
if (v instanceof Graph) {
for (Object vertex : ((Graph) v).getVertices()) {
if (vertex instanceof AgentVertex) {
return vertex;
} else if (vertex instanceof Graph) {
return hasAgentVertex(vertex);
} else if (vertex instanceof ActivityVertex) {
activity = vertex;
} else if (vertex instanceof EntityVertex) {
entity = vertex;
}
}
} else {
return v;
}
if (activity != null) {
return activity;
} else {
return entity;
}
}
public static int getCollapsedVertexSize(Object v) {
int graphSize = 0;
if (v instanceof GraphVertex) {
for (Object vertex : ((GraphVertex) v).clusterGraph.getVertices()) {
if (vertex instanceof GraphVertex) {
graphSize = graphSize + getCollapsedVertexSize(vertex);
} else {
graphSize++;
}
}
}
return graphSize;
//int graphSize = ((Graph) v).getVertexCount();
}
// TO DO: Get the mean of slopes if there are more than 1 vertex with the attribute
// TO DO: Allow for jumping vertices until finding the vertex with the same attribute (e.g., skip an entity between two activities)
public static float getSlope(Object node, ColorScheme colorScheme) {
double slope = Double.NEGATIVE_INFINITY;
for (Edge e : colorScheme.variables.graph.getOutEdges(node)) {
if (!((Vertex) e.getTarget()).getAttributeValue(colorScheme.attribute).contentEquals(VariableNames.UnknownValue)) {
float attValue = ((Vertex) node).getAttributeValueFloat(colorScheme.attribute) - ((Vertex) e.getTarget()).getAttributeValueFloat(colorScheme.attribute);
double time = ((Vertex) node).getTime() - ((Vertex) e.getTarget()).getTime();
if (time != 0) {
slope = attValue / time;
} else if ((attValue != 0) && (time == 0)) {
slope = attValue;
} else if (time == 0) {
slope = 0;
}
}
}
return (float) slope;
}
public static ArrayList<Float> getAttributeValuesFromVertices(DirectedGraph<Object, Edge> graph, String attribute) {
Collection<Object> nodes = graph.getVertices();
ArrayList<Float> values = new ArrayList<>();
for (Object node : nodes) {
if (!((Vertex) node).getAttributeValue(attribute).contentEquals(VariableNames.UnknownValue)) {
values.add(((Vertex) node).getAttributeValueFloat(attribute));
}
}
return values;
}
/**
* Method that returns a GraphVertex from the (Graph) v
* @param v is the Graph
* @return the GraphVertex
*/
public static GraphVertex CreateVertexGraph(Object v) {
Map<String, String> ids = new HashMap<>();
Map<String, GraphAttribute> attributes = new HashMap<>();
CreateVertexGraph(v, ids, attributes);
return new GraphVertex(ids, attributes, (Graph) v);
}
/**
* Method to update the attribute value that counts the number of vertices of the specified type
* @param attributes is the list of attributes already computed
* @param graphVertex is the current GraphVertex
* @param type is the type of vertices we are counting (Agents, Activities, Entities)
*/
private static void UpdateVertexTypeQuantityGraphVertex(Map<String, GraphAttribute> attributes, GraphVertex graphVertex, String type) {
if(attributes.containsKey(type)) {
int qnt = (int) Float.parseFloat(attributes.get(type).getValue());
qnt = (int) (qnt + ((Vertex)graphVertex).getAttributeValueFloat(type));
attributes.get(type).setValue(Integer.toString(qnt));
} else {
GraphAttribute att = new GraphAttribute(type, Integer.toString((int) ((Vertex)graphVertex).getAttributeValueFloat(type)));
attributes.put(att.getName(), att);
}
}
/**
* Method to update the attribute value that counts the number of vertices of the specified type
* @param attributes is the list of attributes already computed
* @param v is the current vertex
* @param type is the type of vertices we are counting (Agents, Activities, Entities)
*/
private static void UpdateVertexTypeQuantity(Map<String, GraphAttribute> attributes, Vertex v, String type) {
if(attributes.containsKey(type)) {
int qnt = (int) Float.parseFloat(attributes.get(type).getValue());
qnt++;
attributes.get(type).setValue(Integer.toString(qnt));
} else {
GraphAttribute att = new GraphAttribute(type, Integer.toString(1));
attributes.put(att.getName(), att);
}
}
/**
* Recursive method to generate the tooltip.
* It considers Graph vertices inside the collapsed vertex.
* @param v is the current vertex for the tooltip
* @param ids is all computed ids for the tooltip
* @param attributes is the attribute list for the tooltip
* @return
*/
public static void CreateVertexGraph(Object v,
Map<String, String> ids,
Map<String, GraphAttribute> attributes){
Collection vertices = ((Graph) v).getVertices();
for (Object vertex : vertices) {
if (!(vertex instanceof Graph))
{
ids.put(((Vertex) vertex).getID(), ((Vertex) vertex).getID());
if(vertex instanceof GraphVertex) {
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexAgentAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexAgentAttribute);
}
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexActivityAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexActivityAttribute);
}
if(((Vertex)vertex).hasAttribute(VariableNames.CollapsedVertexEntityAttribute)){
UpdateVertexTypeQuantityGraphVertex(attributes, (GraphVertex)vertex, VariableNames.CollapsedVertexEntityAttribute);
}
}else if (vertex instanceof AgentVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexAgentAttribute);
} else if (vertex instanceof ActivityVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexActivityAttribute);
} else if (vertex instanceof EntityVertex) {
UpdateVertexTypeQuantity(attributes, (Vertex)vertex, VariableNames.CollapsedVertexEntityAttribute);
}
for (GraphAttribute att : ((Vertex) vertex).getAttributes()) {
if (attributes.containsKey(att.getName())) {
if(!att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexAgentAttribute)
&& !att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexActivityAttribute)
&& !att.getName().equalsIgnoreCase(VariableNames.CollapsedVertexEntityAttribute)) {
GraphAttribute temporary = attributes.get(att.getName());
temporary.updateAttribute(att.getAverageValue());
attributes.put(att.getName(), temporary);
}
} else {
attributes.put(att.getName(), new GraphAttribute(att.getName(), att.getAverageValue()));
}
}
}
else //(vertex instanceof Graph)
{
CreateVertexGraph(vertex, ids, attributes);
}
}
}
/**
* Method to return if two vertices belongs to the same type or if the graphvertex has the same type inside
* @param v1 first vertex
* @param v2 second vertex
* @return true if they belong to the same type or if the graphVertex has vertices of the same type inside it
*/
public static boolean isSameVertexTypes(Vertex v1, Vertex v2) {
if (v1.getNodeType().equalsIgnoreCase(v2.getNodeType())) {
return true;
}
else if(v1 instanceof GraphVertex || v2 instanceof GraphVertex) {
if(v1 instanceof GraphVertex) {
if(v1.hasAttribute(VariableNames.CollapsedVertexActivityAttribute) && v2 instanceof ActivityVertex) {
return true;
}
if(v1.hasAttribute(VariableNames.CollapsedVertexAgentAttribute) && v2 instanceof AgentVertex) {
return true;
}
if(v1.hasAttribute(VariableNames.CollapsedVertexEntityAttribute) && v2 instanceof EntityVertex) {
return true;
}
}
else if(v2 instanceof GraphVertex) {
if(v2.hasAttribute(VariableNames.CollapsedVertexActivityAttribute) && v1 instanceof ActivityVertex) {
return true;
}
if(v2.hasAttribute(VariableNames.CollapsedVertexAgentAttribute) && v1 instanceof AgentVertex) {
return true;
}
if(v2.hasAttribute(VariableNames.CollapsedVertexEntityAttribute) && v1 instanceof EntityVertex) {
return true;
}
}
}
return false;
}
}
| Added a BFS algorithm
Added the method getSubPathProbability | src/main/java/br/uff/ic/utility/GraphUtils.java | Added a BFS algorithm Added the method getSubPathProbability | <ide><path>rc/main/java/br/uff/ic/utility/GraphUtils.java
<ide> import br.uff.ic.utility.graph.Vertex;
<ide> import edu.uci.ics.jung.graph.DirectedGraph;
<ide> import edu.uci.ics.jung.graph.Graph;
<add>import edu.uci.ics.jung.graph.util.Pair;
<ide> import java.util.ArrayList;
<ide> import java.util.Collection;
<ide> import java.util.HashMap;
<add>import java.util.Iterator;
<add>import java.util.LinkedList;
<ide> import java.util.Map;
<ide>
<ide> /**
<ide> }
<ide> return false;
<ide> }
<add>
<add> /**
<add> * Method that calculates the probability of using this edge
<add> * @param graph is the graph
<add> * @param e is the edge that connects both vertices
<add> * @param numberOfGraphs is the total number of graphs used during the graph merges
<add> * @return the probability of using this edge
<add> */
<add> public static float getSubPathProbability(Graph graph, Edge e, int numberOfGraphs) {
<add> Pair endpoints = graph.getEndpoints(e);
<add> Object target = endpoints.getSecond();
<add> int sources = graph.getInEdges(target).size();
<add> if(sources == 1)
<add> return 1;
<add> else
<add> return e.getEdgeFrequencyValue(numberOfGraphs);
<add> }
<add>
<add> /**
<add> * Breadth first search algorithm that returns the minimum path if exist
<add> * @param source is the source vertex
<add> * @param target is the destination vertex
<add> * @param graph is the graph
<add> * @return the list of edges connecting source to target
<add> */
<add> public static Map<String, Edge> BFS(Vertex source, Vertex target, Graph graph)
<add> {
<add>
<add> Map<String, Boolean> visited = new HashMap<>();
<add> Map<String, Edge> path = new HashMap<>();
<add>
<add> LinkedList<Vertex> queue = new LinkedList<>();
<add> LinkedList<Edge> edgeQueue = new LinkedList<>();
<add>
<add> // Mark the current node as visited and enqueue it
<add> visited.put(source.getID(), Boolean.TRUE);
<add> queue.add(source);
<add>
<add> while (!queue.isEmpty())
<add> {
<add> // Dequeue a vertex from queue and print it
<add> source = queue.poll();
<add> Edge next = edgeQueue.poll();
<add> if(next != null)
<add> path.put(source.getID(), next);
<add> System.out.print(source.getID() + " -> ");
<add> // Get all adjacent vertices of the dequeued vertex s
<add> // If a adjacent has not been visited, then mark it
<add> // visited and enqueue it
<add> Iterator<Object> i = graph.getInEdges(source).iterator();
<add> while (i.hasNext())
<add> {
<add> Edge edge = (Edge) i.next();
<add> Vertex n = (Vertex) edge.getSource();
<add> if (!visited.containsKey(n.getID()))
<add> {
<add> visited.put(n.getID(), Boolean.TRUE);
<add> queue.add(n);
<add> edgeQueue.add(edge);
<add> if(n.getID().equalsIgnoreCase(target.getID())) {
<add> System.out.println("Reached target: " + n.getID());
<add> path.put(n.getID(), edge);
<add> return path;
<add> }
<add> }
<add> }
<add> }
<add> System.out.println();
<add> System.out.println("No path exists");
<add> return null;
<add> }
<ide> } |
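A hedged sketch of how the two methods introduced above might be called; nothing in it is part of the commit. The graph, the two vertices and numberOfGraphs are assumed to come from the usual provenance-loading code, and the only new calls used are GraphUtils.BFS and GraphUtils.getSubPathProbability (plus the getSource/getTarget/getID accessors already used inside GraphUtils). Note that BFS explores getInEdges, so it walks from the first argument backwards toward the second and returns the path keyed by vertex ID, or null when no such chain exists.

import br.uff.ic.utility.GraphUtils;
import br.uff.ic.utility.graph.Edge;
import br.uff.ic.utility.graph.Vertex;
import edu.uci.ics.jung.graph.DirectedGraph;
import java.util.Map;

// Hedged sketch (hypothetical caller, not from the commit above).
public class SubPathSketch {
    public static void printSubPath(DirectedGraph<Object, Edge> graph,
                                    Vertex from, Vertex ancestor, int numberOfGraphs) {
        // Minimum-hop chain of in-edges leading from 'from' back to 'ancestor'.
        Map<String, Edge> path = GraphUtils.BFS(from, ancestor, graph);
        if (path == null) {
            return; // the two vertices are not connected through in-edges
        }
        for (Edge e : path.values()) {
            // 1 when the edge's target has a single in-edge, otherwise the
            // edge's merged frequency value.
            float p = GraphUtils.getSubPathProbability(graph, e, numberOfGraphs);
            System.out.println(((Vertex) e.getSource()).getID() + " -> "
                    + ((Vertex) e.getTarget()).getID() + " : " + p);
        }
    }
}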
|
Java | lgpl-2.1 | fcbccd560b98cab7004760660fc53fe5ed677a30 | 0 | ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal | /*
*
* Copyright (c) 2010 by Pieter Geerkens
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.build.module;
import VASSAL.build.AutoConfigurable;
import VASSAL.build.Buildable;
import VASSAL.build.GameModule;
import VASSAL.build.module.documentation.HelpFile;
import VASSAL.build.module.map.MassKeyCommand;
import VASSAL.command.Command;
import VASSAL.command.CommandEncoder;
import VASSAL.configure.TranslatableStringEnum;
import VASSAL.configure.VisibilityCondition;
import VASSAL.i18n.Resources;
import VASSAL.tools.SequenceEncoder;
import VASSAL.tools.UniqueIdManager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A Global Key Command that is automatically invoked on game start-up,
* once the various Key Listeners have been started.
* <p>
* As of 3.6, multiple Startup Global Key Commands can be depended on to
* process in the correct order.
*
* @author Pieter Geerkens
*
*/
public class StartupGlobalKeyCommand extends GlobalKeyCommand implements GameComponent, CommandEncoder, UniqueIdManager.Identifyable {
public static final String WHEN_TO_APPLY = "whenToApply"; //NON-NLS
public static final String APPLY_FIRST_LAUNCH_OF_SESSION = "firstLaunchOfSession"; //NON-NLS
public static final String APPLY_EVERY_LAUNCH_OF_SESSION = "everyLaunchOfSession"; //NON-NLS
public static final String APPLY_START_OF_GAME_ONLY = "startOfGameOnly"; //NON-NLS
private static final char DELIMITER = '\t'; //$NON-NLS-1$
public static final String COMMAND_PREFIX = "SGKC" + DELIMITER; //NON-NLS-1$
protected static final UniqueIdManager idMgr = new UniqueIdManager("SGKC"); //$NON-NLS-1$
protected String id = ""; // Our unique ID
public String whenToApply = APPLY_EVERY_LAUNCH_OF_SESSION;
private boolean hasEverApplied = false; // Has ever been applied during this session (NOT saved with game state)
private boolean hasAppliedThisGame = false; // Has ever been applied during this *game* (Saved with game state)
public static class Prompt extends TranslatableStringEnum {
@Override
public String[] getValidValues(AutoConfigurable target) {
return new String[]{ APPLY_FIRST_LAUNCH_OF_SESSION, APPLY_EVERY_LAUNCH_OF_SESSION, APPLY_START_OF_GAME_ONLY };
}
@Override
public String[] getI18nKeys(AutoConfigurable target) {
return new String[] {
"Editor.StartupGlobalKeyCommand.first_launch_of_session",
"Editor.StartupGlobalKeyCommand.every_launch_of_session",
"Editor.StartupGlobalKeyCommand.start_of_game_only"
};
}
}
@SuppressWarnings("removal")
public StartupGlobalKeyCommand() {
super();
condition = null;
/* These four fields pertaining to the physical representation of the
* GKC on the toolbar are not applicable in this implementation.
*/
launch.setAttribute(BUTTON_TEXT, ""); //NON-NLS
launch.setAttribute(TOOLTIP, ""); //NON-NLS
launch.setAttribute(ICON, ""); //NON-NLS
launch.setAttribute(HOTKEY, ""); //NON-NLS
}
@SuppressWarnings("removal")
public StartupGlobalKeyCommand(MassKeyCommand gkc) {
super(gkc);
condition = null;
/* These four fields pertaining to the physical representation of the
* GKC on the toolbar are not applicable in this implementation.
*/
launch.setAttribute(BUTTON_TEXT, ""); //NON-NLS
launch.setAttribute(TOOLTIP, ""); //NON-NLS
launch.setAttribute(ICON, ""); //NON-NLS
launch.setAttribute(HOTKEY, ""); //NON-NLS
}
//---------------------- GlobalKeyCommand extension ---------------------
@Override
public void addTo(Buildable parent) {
idMgr.add(this);
super.addTo(parent);
GameModule.getGameModule().getGameState().addGameComponent(this);
GameModule.getGameModule().addCommandEncoder(this);
}
@Override
public void removeFrom(Buildable parent) {
GameModule.getGameModule().getGameState().removeGameComponent(this);
GameModule.getGameModule().removeCommandEncoder(this);
idMgr.remove(this);
}
public static String getConfigureTypeName() {
return Resources.getString("Editor.StartupGlobalKeyCommand.component_type");
}
@Override
public HelpFile getHelpFile() {
return HelpFile.getReferenceManualPage("Map.html", "StartupGlobalKeyCommand"); //NON-NLS
}
@SuppressWarnings("removal")
@Override
public VisibilityCondition getAttributeVisibility(String key) {
if (List.of(BUTTON_TEXT, TOOLTIP, ICON, HOTKEY).contains(key)) {
return () -> false;
}
else {
return super.getAttributeVisibility(key);
}
}
@Override
public String[] getAttributeDescriptions() {
final List<String> descs = new ArrayList<>();
descs.add(Resources.getString("Editor.StartupGlobalKeyCommand.when_to_apply"));
Collections.addAll(descs, super.getAttributeDescriptions());
return descs.toArray(new String[0]);
}
@Override
public String[] getAttributeNames() {
final List<String> names = new ArrayList<>();
names.add(WHEN_TO_APPLY);
// Filter some of the crazy out of the original MassKeyCommand list, so we can add more things "safely"
for (final String n : super.getAttributeNames()) {
if (List.of(CHECK_VALUE, CHECK_PROPERTY, AFFECTED_PIECE_NAMES).contains(n)) {
continue;
}
names.add(n);
}
return names.toArray(new String[0]);
}
@Override
public Class<?>[] getAttributeTypes() {
final List<Class<?>> types = new ArrayList<>();
types.add(Prompt.class);
Collections.addAll(types, super.getAttributeTypes());
return types.toArray(new Class<?>[0]);
}
@Override
public void setAttribute(String key, Object value) {
if (WHEN_TO_APPLY.equals(key)) {
if (value instanceof String) {
whenToApply = (String)value;
}
}
else {
super.setAttribute(key, value);
}
}
@Override
public String getAttributeValueString(String key) {
if (WHEN_TO_APPLY.equals(key)) {
return whenToApply;
}
else {
return super.getAttributeValueString(key);
}
}
/**
* Apply the command, but only if it hasn't been marked as already-applied (by whatever its when-to-apply parameters are)
* @return true if command was applied
*/
public boolean applyIfNotApplied() {
if (APPLY_FIRST_LAUNCH_OF_SESSION.equals(whenToApply)) {
if (hasEverApplied) {
return false;
}
}
else if (APPLY_START_OF_GAME_ONLY.equals(whenToApply)) {
if (hasAppliedThisGame) {
return false;
}
}
hasEverApplied = true; // This one will be false again next time anything calls GameState.setup(true)
hasAppliedThisGame = true; // This one will be remembered as part of the game state (i.e. even after loading a game)
apply();
return true;
}
@Override
public void setup(boolean gameStarting) {
}
/**
* When initializing a new game from a Predefined Setup that loads a saved game, mark that this is actually a fresh game rather than a load of an old one
*/
public void freshGame() {
hasAppliedThisGame = false;
}
@Override
public Command getRestoreCommand() {
return new UpdateStartupGlobalKeyCommand(this, hasAppliedThisGame);
}
/**
* Sets our unique ID (among Startup Global Key Commands), so that multiple SGKCs can sort their save/restore commands from each other
* @param id Sets our unique ID
*/
@Override
public void setId(String id) {
this.id = id;
}
/**
* @return unique ID of this SGKC
*/
@Override
public String getId() {
return id;
}
/**
* Deserializes our command from a string version, if the command belongs to us.
* @param command Serialized string command
* @return An {@link UpdateStartupGlobalKeyCommand}
*/
@Override
public Command decode(final String command) {
if (!command.startsWith(COMMAND_PREFIX + getId() + DELIMITER)) {
return null;
}
final SequenceEncoder.Decoder decoder = new SequenceEncoder.Decoder(command, DELIMITER);
decoder.nextToken(); // Skip over the Command Prefix
decoder.nextToken(); // Skip over the Id
final boolean applied = decoder.nextBoolean(true);
return new UpdateStartupGlobalKeyCommand(this, applied);
}
/**
* Serializes our command into a string, if it belongs to us
* @param c Command to serialize. Only serialized if it's an UpdateStartupGlobalKeyCommand.
* @return Serialized command, or null if command passed wasn't an UpdateStartupGlobalKeyCommand.
*/
@Override
public String encode(final Command c) {
if (!(c instanceof UpdateStartupGlobalKeyCommand)) {
return null;
}
final UpdateStartupGlobalKeyCommand comm = (UpdateStartupGlobalKeyCommand) c;
final SequenceEncoder encoder = new SequenceEncoder(DELIMITER);
encoder.append(comm.getId());
encoder.append(comm.appliedThisGame);
if (!id.equals(comm.getId())) {
return null;
}
return COMMAND_PREFIX + encoder.getValue();
}
/**
* Our "command" format for remembering whether the key command has been applied during this game
*/
private static class UpdateStartupGlobalKeyCommand extends Command {
private final boolean appliedThisGame;
StartupGlobalKeyCommand sgkc;
public UpdateStartupGlobalKeyCommand(StartupGlobalKeyCommand sgkc, boolean appliedThisGame) {
this.sgkc = sgkc;
this.appliedThisGame = appliedThisGame;
}
@Override
protected void executeCommand() {
sgkc.hasAppliedThisGame = appliedThisGame;
}
@Override
protected Command myUndoCommand() {
return null;
}
public String getId() {
return sgkc.getId();
}
}
}
| vassal-app/src/main/java/VASSAL/build/module/StartupGlobalKeyCommand.java | /*
*
* Copyright (c) 2010 by Pieter Geerkens
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.build.module;
import VASSAL.build.AutoConfigurable;
import VASSAL.build.Buildable;
import VASSAL.build.GameModule;
import VASSAL.build.module.documentation.HelpFile;
import VASSAL.build.module.map.MassKeyCommand;
import VASSAL.command.Command;
import VASSAL.command.CommandEncoder;
import VASSAL.configure.TranslatableStringEnum;
import VASSAL.configure.VisibilityCondition;
import VASSAL.i18n.Resources;
import VASSAL.tools.SequenceEncoder;
import VASSAL.tools.UniqueIdManager;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A Global Key Command that is automatically invoked on game start-up,
* once the various Key Listeners have been started.
* <p>
* As of 3.6, multiple Startup Global Key Commands can be depended on to
* process in the correct order.
*
* @author Pieter Geerkens
*
*/
public class StartupGlobalKeyCommand extends GlobalKeyCommand implements GameComponent, CommandEncoder, UniqueIdManager.Identifyable {
public static final String WHEN_TO_APPLY = "whenToApply"; //NON-NLS
public static final String APPLY_FIRST_LAUNCH_OF_SESSION = "firstLaunchOfSession"; //NON-NLS
public static final String APPLY_EVERY_LAUNCH_OF_SESSION = "everyLaunchOfSession"; //NON-NLS
public static final String APPLY_START_OF_GAME_ONLY = "startOfGameOnly"; //NON-NLS
private static final char DELIMITER = '\t'; //$NON-NLS-1$
public static final String COMMAND_PREFIX = "SGKC" + DELIMITER; //NON-NLS-1$
protected static final UniqueIdManager idMgr = new UniqueIdManager("SGKC"); //$NON-NLS-1$
protected String id = ""; // Our unique ID
public String whenToApply = APPLY_FIRST_LAUNCH_OF_SESSION;
private boolean hasEverApplied = false; // Has ever been applied during this session (NOT saved with game state)
private boolean hasAppliedThisGame = false; // Has ever been applied during this *game* (Saved with game state)
public static class Prompt extends TranslatableStringEnum {
@Override
public String[] getValidValues(AutoConfigurable target) {
return new String[]{ APPLY_FIRST_LAUNCH_OF_SESSION, APPLY_EVERY_LAUNCH_OF_SESSION, APPLY_START_OF_GAME_ONLY };
}
@Override
public String[] getI18nKeys(AutoConfigurable target) {
return new String[] {
"Editor.StartupGlobalKeyCommand.first_launch_of_session",
"Editor.StartupGlobalKeyCommand.every_launch_of_session",
"Editor.StartupGlobalKeyCommand.start_of_game_only"
};
}
}
@SuppressWarnings("removal")
public StartupGlobalKeyCommand() {
super();
condition = null;
/* These four fields pertaining to the physical representation of the
* GKC on the toolbar are not applicable in this implementation.
*/
launch.setAttribute(BUTTON_TEXT, ""); //NON-NLS
launch.setAttribute(TOOLTIP, ""); //NON-NLS
launch.setAttribute(ICON, ""); //NON-NLS
launch.setAttribute(HOTKEY, ""); //NON-NLS
}
@SuppressWarnings("removal")
public StartupGlobalKeyCommand(MassKeyCommand gkc) {
super(gkc);
condition = null;
/* These four fields pertaining to the physical representation of the
* GKC on the toolbar are not applicable in this implementation.
*/
launch.setAttribute(BUTTON_TEXT, ""); //NON-NLS
launch.setAttribute(TOOLTIP, ""); //NON-NLS
launch.setAttribute(ICON, ""); //NON-NLS
launch.setAttribute(HOTKEY, ""); //NON-NLS
}
//---------------------- GlobalKeyCommand extension ---------------------
@Override
public void addTo(Buildable parent) {
idMgr.add(this);
super.addTo(parent);
GameModule.getGameModule().getGameState().addGameComponent(this);
GameModule.getGameModule().addCommandEncoder(this);
}
@Override
public void removeFrom(Buildable parent) {
GameModule.getGameModule().getGameState().removeGameComponent(this);
GameModule.getGameModule().removeCommandEncoder(this);
idMgr.remove(this);
}
public static String getConfigureTypeName() {
return Resources.getString("Editor.StartupGlobalKeyCommand.component_type");
}
@Override
public HelpFile getHelpFile() {
return HelpFile.getReferenceManualPage("Map.html", "StartupGlobalKeyCommand"); //NON-NLS
}
@SuppressWarnings("removal")
@Override
public VisibilityCondition getAttributeVisibility(String key) {
if (List.of(BUTTON_TEXT, TOOLTIP, ICON, HOTKEY).contains(key)) {
return () -> false;
}
else {
return super.getAttributeVisibility(key);
}
}
@Override
public String[] getAttributeDescriptions() {
final List<String> descs = new ArrayList<>();
descs.add(Resources.getString("Editor.StartupGlobalKeyCommand.when_to_apply"));
Collections.addAll(descs, super.getAttributeDescriptions());
return descs.toArray(new String[0]);
}
@Override
public String[] getAttributeNames() {
final List<String> names = new ArrayList<>();
names.add(WHEN_TO_APPLY);
// Filter some of the crazy out of the original MassKeyCommand list, so we can add more things "safely"
for (final String n : super.getAttributeNames()) {
if (List.of(CHECK_VALUE, CHECK_PROPERTY, AFFECTED_PIECE_NAMES).contains(n)) {
continue;
}
names.add(n);
}
return names.toArray(new String[0]);
}
@Override
public Class<?>[] getAttributeTypes() {
final List<Class<?>> types = new ArrayList<>();
types.add(Prompt.class);
Collections.addAll(types, super.getAttributeTypes());
return types.toArray(new Class<?>[0]);
}
@Override
public void setAttribute(String key, Object value) {
if (WHEN_TO_APPLY.equals(key)) {
if (value instanceof String) {
whenToApply = (String)value;
}
}
else {
super.setAttribute(key, value);
}
}
@Override
public String getAttributeValueString(String key) {
if (WHEN_TO_APPLY.equals(key)) {
return whenToApply;
}
else {
return super.getAttributeValueString(key);
}
}
/**
* Apply the command, but only if it hasn't been marked as already-applied (by whatever its when-to-apply parameters are)
* @return true if command was applied
*/
public boolean applyIfNotApplied() {
if (APPLY_FIRST_LAUNCH_OF_SESSION.equals(whenToApply)) {
if (hasEverApplied) {
return false;
}
}
else if (APPLY_START_OF_GAME_ONLY.equals(whenToApply)) {
if (hasAppliedThisGame) {
return false;
}
}
hasEverApplied = true; // This one will be false again next time anything calls GameState.setup(true)
hasAppliedThisGame = true; // This one will be remembered as part of the game state (i.e. even after loading a game)
apply();
return true;
}
@Override
public void setup(boolean gameStarting) {
}
/**
* When initializing a new game from a Predefined Setup that loads a saved game, mark that this is actually a fresh game rather than a load of an old one
*/
public void freshGame() {
hasAppliedThisGame = false;
}
@Override
public Command getRestoreCommand() {
return new UpdateStartupGlobalKeyCommand(this, hasAppliedThisGame);
}
/**
* Sets our unique ID (among Startup Global Key Commands), so that multiple SGKCs can sort their save/restore commands from each other
* @param id Sets our unique ID
*/
@Override
public void setId(String id) {
this.id = id;
}
/**
* @return unique ID of this SGKC
*/
@Override
public String getId() {
return id;
}
/**
* Deserializes our command from a string version, if the command belongs to us.
* @param command Serialized string command
* @return An {@link ChessClockControl.UpdateStartupGlobalKeyCommand}
*/
@Override
public Command decode(final String command) {
if (!command.startsWith(COMMAND_PREFIX + getId() + DELIMITER)) {
return null;
}
final SequenceEncoder.Decoder decoder = new SequenceEncoder.Decoder(command, DELIMITER);
decoder.nextToken(); // Skip over the Command Prefix
decoder.nextToken(); // Skip over the Id
final boolean applied = decoder.nextBoolean(true);
return new UpdateStartupGlobalKeyCommand(this, applied);
}
/**
* Serializes our command into a string, if it belongs to us
   * @param c Command to serialize. Only serialized if it's an UpdateStartupGlobalKeyCommand.
   * @return Serialized command, or null if the command passed wasn't an UpdateStartupGlobalKeyCommand.
*/
@Override
public String encode(final Command c) {
if (!(c instanceof UpdateStartupGlobalKeyCommand)) {
return null;
}
final UpdateStartupGlobalKeyCommand comm = (UpdateStartupGlobalKeyCommand) c;
final SequenceEncoder encoder = new SequenceEncoder(DELIMITER);
encoder.append(comm.getId());
encoder.append(comm.appliedThisGame);
if (!id.equals(comm.getId())) {
return null;
}
return COMMAND_PREFIX + encoder.getValue();
}
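  // Example (hypothetical values, added for illustration): with id "SGKC1" and
  // appliedThisGame == true, encode() produces roughly
  //   COMMAND_PREFIX + "SGKC1" + DELIMITER + "true"
  // (exact escaping is left to SequenceEncoder), while decode() only accepts strings that
  // begin with COMMAND_PREFIX + getId() + DELIMITER, so each SGKC ignores the others' commands.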
/**
* Our "command" format for remembering whether the key command has been applied during this game
*/
private static class UpdateStartupGlobalKeyCommand extends Command {
private final boolean appliedThisGame;
StartupGlobalKeyCommand sgkc;
public UpdateStartupGlobalKeyCommand(StartupGlobalKeyCommand sgkc, boolean appliedThisGame) {
this.sgkc = sgkc;
this.appliedThisGame = appliedThisGame;
}
@Override
protected void executeCommand() {
sgkc.hasAppliedThisGame = appliedThisGame;
}
@Override
protected Command myUndoCommand() {
return null;
}
public String getId() {
return sgkc.getId();
}
}
}
| match default to original behavior
| vassal-app/src/main/java/VASSAL/build/module/StartupGlobalKeyCommand.java | match default to original behavior | <ide><path>assal-app/src/main/java/VASSAL/build/module/StartupGlobalKeyCommand.java
<ide> protected static final UniqueIdManager idMgr = new UniqueIdManager("SGKC"); //$NON-NLS-1$
<ide> protected String id = ""; // Our unique ID
<ide>
<del> public String whenToApply = APPLY_FIRST_LAUNCH_OF_SESSION;
<add> public String whenToApply = APPLY_EVERY_LAUNCH_OF_SESSION;
<ide>
<ide> private boolean hasEverApplied = false; // Has ever been applied during this session (NOT saved with game state)
<ide> private boolean hasAppliedThisGame = false; // Has ever been applied during this *game* (Saved with game state) |
|
Java | mit | a5e109f9966ee8401db23e6126e9249853838e36 | 0 | AvatarHurden/Tribal-Wars-Engine | package custom_components;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.AbstractButton;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.border.LineBorder;
import javax.swing.border.MatteBorder;
import javax.swing.border.SoftBevelBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.PlainDocument;
import property_classes.Property;
import property_classes.Property_Boolean;
import property_classes.Property_Escolha;
import property_classes.Property_Nome;
import property_classes.Property_Number;
import property_classes.Property_UnidadeList;
import selecionar_mundo.GUI;
import config.File_Manager;
import config.Mundo_Reader;
import database.Cores;
import database.Mundo;
import database.Unidade;
/**
* Dialog that contains info to edit a list (Mundos and ModeloTropas)
*
*
*
* @author Arthur
*
*/
@SuppressWarnings({ "rawtypes", "unchecked", "serial" })
public class EditDialog extends JDialog {
List<Object> objects;
Map<Object, ArrayList<Property>> variableMap;
Field variableField;
// Used for setting the visibility of the information panels
List<ObjectInterface> interfaceList = new ArrayList<ObjectInterface>();
private ObjectInterface selectedInterface;
JPanel namePanel;
JScrollPane scroll;
int listNumber = 0;
JPanel informationPanel = new JPanel();
JButton saveButton, upButton, downButton;
//TODO not receive the Field, extract it from the objects
//TODO reduce time to run program
	// 1 object = 991 milliseconds
	// 56 objects = 2218 milliseconds
	// theoretically, construction without any objects takes 968.69 milliseconds
public EditDialog(List objects, Field variableField) {
this.objects = objects;
this.variableField = variableField;
variableMap = new HashMap<Object, ArrayList<Property>>();
for(Object o : objects)
try {
variableMap.put(o, (ArrayList<Property>)variableField.get(o));
} catch (IllegalArgumentException | IllegalAccessException e) {
e.printStackTrace();
}
setLayout(new GridBagLayout());
getContentPane().setBackground(Cores.ALTERNAR_ESCURO);
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.BOTH;
c.insets = new Insets(0,0,0,0);
makeScrollPanel(c);
for (Object o : objects){
createInterface(o);
addInterfaceToScroll(interfaceList.get(objects.indexOf(o)), listNumber++);
}
informationPanel.setBackground(Cores.ALTERNAR_ESCURO);
c.gridy++;
add(informationPanel,c);
c.gridy++;
c.gridwidth = 2;
// c.insets = new Insets(10,0,0,0);
add(makeEditPanel(), c);
interfaceList.get(0).setSelected(true);
pack();
setVisible(true);
setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
}
private void createInterface(Object o) {
ObjectInterface oi = new ObjectInterface(o, variableMap.get(o));
interfaceList.add(oi);
}
private void addInterfaceToScroll(ObjectInterface oi, int position) {
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.anchor = GridBagConstraints.NORTH;
c.gridy = position;
namePanel.add(oi.objectName,c);
c.gridy = 0;
informationPanel.add(oi.objectInformation, c);
}
private void removeInterfaceFromScroll(ObjectInterface oi){
namePanel.remove(oi.objectName);
informationPanel.remove(oi.objectInformation);
}
private void makeScrollPanel(GridBagConstraints c) {
namePanel = new JPanel();
GridBagLayout layout = new GridBagLayout();
layout.columnWidths = new int[] { 140 };
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
namePanel.setLayout(layout);
scroll = new JScrollPane(namePanel);
scroll.setPreferredSize(new Dimension(160,
informationPanel.getPreferredSize().height));
add(scroll,c);
}
private JPanel makeEditPanel() {
JPanel panel = new JPanel();
panel.setBackground(Cores.FUNDO_CLARO);
panel.setBorder(new MatteBorder(3, 0, 0, 0, Cores.SEPARAR_ESCURO));
GridBagLayout layout = new GridBagLayout();
layout.columnWidths = new int[] { 160, informationPanel.getPreferredSize().width };
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
panel.setLayout(layout);
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(10,0,10,0);
c.gridy = 0;
c.gridx = 0;
JButton newButton = new JButton("Novo");
newButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
try {
Object obj = objects.get(0).getClass().newInstance();
variableMap.put(obj, (ArrayList<Property>)variableField.get(obj));
// Puts object in the list
objects.add(obj);
// Creates the interface for the object
createInterface(obj);
// Adds interface to the scroll
addInterfaceToScroll(interfaceList.get(interfaceList.size()-1),
interfaceList.size()-1);
pack();
scroll.getVerticalScrollBar().setValue(
scroll.getVerticalScrollBar().getMaximum());
selectedInterface.setSelected(false);
interfaceList.get(interfaceList.size()-1).setSelected(true);
selectedInterface.saveObejct();
} catch (InstantiationException | IllegalAccessException
| IllegalArgumentException e) {
e.printStackTrace();
}
}
});
panel.add(newButton,c);
JPanel rightPanel = new JPanel();
rightPanel.setOpaque(false);
saveButton = new JButton("Salvar");
saveButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
selectedInterface.saveObejct();
}
});
c.gridx++;
rightPanel.add(saveButton, c);
upButton = new JButton(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/up_arrow.png"))));
upButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
int position = objects.indexOf(selectedInterface.object);
if (position > 0) {
Collections.swap(objects, position, position-1);
Collections.swap(interfaceList, position, position-1);
removeInterfaceFromScroll(interfaceList.get(position));
removeInterfaceFromScroll(interfaceList.get(position-1));
addInterfaceToScroll(selectedInterface, position-1);
addInterfaceToScroll(interfaceList.get(position), position);
revalidate();
changeButtons();
}
}
});
rightPanel.add(upButton, c);
downButton = new JButton(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/down_arrow.png"))));
downButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
int position = objects.indexOf(selectedInterface.object);
if (position < interfaceList.size()-1) {
Collections.swap(objects, position, position+1);
Collections.swap(interfaceList, position, position+1);
removeInterfaceFromScroll(interfaceList.get(position));
removeInterfaceFromScroll(interfaceList.get(position+1));
addInterfaceToScroll(selectedInterface, position+1);
addInterfaceToScroll(interfaceList.get(position), position);
scroll.getVerticalScrollBar().setValue(32*38);
revalidate();
changeButtons();
}
}
});
c.gridx++;
rightPanel.add(downButton, c);
JButton deleteButton = new JButton("Delete");
deleteButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
//TODO change text
int delete = JOptionPane.showConfirmDialog(null,
new JLabel("Deseja deletar para sempre?"),
"Confirmar", JOptionPane.YES_NO_OPTION);
if (delete == JOptionPane.YES_OPTION) {
int position = interfaceList.indexOf(selectedInterface);
removeInterfaceFromScroll(selectedInterface);
interfaceList.remove(selectedInterface);
objects.remove(selectedInterface.object);
for (int i = position; i < interfaceList.size(); i++) {
removeInterfaceFromScroll(interfaceList.get(i));
addInterfaceToScroll(interfaceList.get(i), i);
}
						// If the last object was deleted, select the
						// previous one. Otherwise, the next one
if (position == interfaceList.size())
interfaceList.get(position-1).setSelected(true);
else
interfaceList.get(position).setSelected(true);
changeButtons();
}
}
});
c.gridx++;
rightPanel.add(deleteButton, c);
c.gridx = 1;
panel.add(rightPanel,c);
return panel;
}
// Turns the save, up and down buttons on or off as needed
private void changeButtons() {
saveButton.setEnabled(!selectedInterface.isSaved());
upButton.setEnabled(interfaceList.indexOf(selectedInterface) != 0);
downButton.setEnabled(interfaceList.indexOf(selectedInterface)
!= interfaceList.size()-1);
}
private class ObjectInterface {
/**
* The object that this interface references.
* <br> All editing and saving of the object is done in the <class>Object
* Interface</class>.
*/
private Object object;
private JPanel objectName;
private JPanel objectInformation;
// JTextField that contains the name of the object. Needed because name is very important
private JTextField nameTextField;
// Boolean that says if the object has been saved after any changes
private boolean isSaved = true;
// The symbol to be added to the objectName when it is unsaved
private JLabel unsavedSignal = new JLabel(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/teste.png"))));
/**
* Maps the object's properties to an object that contains the property's value
* <br>This is the mapping that is made depending on the type of property:
* <br>
* <br> Nome : JTextField
* <br> Boolean : JCheckBox
* <br> Escolha : ButtonGroup
* <br> Number : JTextField
* <br> UnidadeList : HashMap<\Unidade, TroopFormattedTextField>
*/
private Map<Property, Object> variableMap = new HashMap<Property, Object>();
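		// Illustration (added for clarity; the property names below are hypothetical examples):
		// once the panels are built this map might hold entries such as
		//   Property_Nome("Nome")           -> the JTextField holding the object's name
		//   Property_Boolean("Ataque")      -> a JCheckBox
		//   Property_Escolha("Tipo")        -> a ButtonGroup of JRadioButtons
		//   Property_Number("Velocidade")   -> a JTextField restricted to digits and a period
		//   Property_UnidadeList("Tropas")  -> a HashMap<Unidade, TroopFormattedTextField>
		// saveObejct() later walks these entries to push the widget values back into the properties.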
public ObjectInterface(Object object, List<Property> list) {
this.object = object;
createNamePanel(object.toString());
createInformationPanel(list);
}
private void createNamePanel(String s) {
objectName = new JPanel();
objectName.add(new JLabel(s));
objectName.setBackground(Cores.FUNDO_CLARO);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.RAISED));
objectName.addMouseListener(new MouseListener() {
public void mouseReleased(MouseEvent arg0) {}
public void mousePressed(MouseEvent arg0) {}
public void mouseExited(MouseEvent arg0) {}
public void mouseEntered(MouseEvent arg0) {}
public void mouseClicked(MouseEvent arg0) {
selectedInterface.setSelected(false);
setSelected(true);
}
});
}
private void createInformationPanel(List<Property> list) {
objectInformation = new JPanel(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(0, 10, 5, 10);
c.fill = GridBagConstraints.HORIZONTAL;
c.gridy = 0;
for (Property i : list) {
if (i.getClass().equals(Property_Nome.class)) {
objectInformation.add(makeNamePanel((Property_Nome) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Boolean.class)) {
objectInformation.add(makeBooleanPanel((Property_Boolean) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Escolha.class)) {
objectInformation.add(makeEscolhaPanel((Property_Escolha) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Number.class)) {
objectInformation.add(makeNumberPanel((Property_Number) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_UnidadeList.class)) {
objectInformation.add(makeUnidadeListPanel((Property_UnidadeList) i), c);
c.gridy++;
}
}
objectInformation.setBackground(Cores.ALTERNAR_ESCURO);
objectInformation.setVisible(false);
}
private JPanel makeNamePanel(Property_Nome variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
c.anchor = GridBagConstraints.WEST;
panel.add(new JLabel("Nome"), c);
nameTextField = new JTextField(16);
nameTextField.setText(variable.getName());
nameTextField.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.anchor = GridBagConstraints.EAST;
c.gridy++;
c.gridwidth = 2;
panel.add(nameTextField, c);
variableMap.put(variable, nameTextField);
return panel;
}
private JPanel makeBooleanPanel(Property_Boolean variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
JCheckBox checkBox = new JCheckBox();
checkBox.setOpaque(false);
checkBox.setSelected(variable.getValue());
checkBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
setSaved(false);
}
});
c.gridx++;
panel.add(checkBox, c);
variableMap.put(variable, checkBox);
return panel;
}
private JPanel makeEscolhaPanel(Property_Escolha variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
ButtonGroup buttonGroup = new ButtonGroup();
JPanel buttonPanel = new JPanel(new GridLayout(0,1));
buttonPanel.setOpaque(false);
for (String s : variable.getOptions()) {
JRadioButton button = new JRadioButton(s);
button.setOpaque(false);
button.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
setSaved(false);
}
});
if (variable.isOption(s))
button.setSelected(true);
buttonGroup.add(button);
buttonPanel.add(button);
}
// Code for retrieving the selected value
// Enumeration<AbstractButton> enumeration = buttonGroup.getElements();
//
// while (enumeration.hasMoreElements()) {
//
// AbstractButton b = enumeration.nextElement();
//
// if (b.isSelected())
// System.out.println(b.getText());
//
// }
c.gridx++;
panel.add(buttonPanel, c);
variableMap.put(variable, buttonGroup);
return panel;
}
private JPanel makeNumberPanel(Property_Number variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
JTextField txt = new JTextField(variable.getValue().toString());
txt.setDocument(new PlainDocument() {
@Override
public void insertString(int offset, String str, AttributeSet attr)
throws BadLocationException {
if (str == null)
return;
// Only does anything if it is a comma, period or number
if (str.charAt(0) == '.' || str.charAt(0) == ',' ||
Character.isDigit(str.charAt(0))) {
						// If it is a comma or period, check if it already has one
if (!Character.isDigit(str.charAt(0)) &&
super.getText(0, getLength()).contains(".")) {}
else
// If it does not and the inserted is a comma, add a period
if (str.charAt(0) == ',')
super.insertString(offset, ".", attr);
else
// Else, add the inserted character (period or number)
super.insertString(offset, str, attr);
}
}
});
txt.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent f) {
if (((JTextField) f.getSource()).getText().equals("")) {
((JTextField) f.getSource()).setText("1");
}
}
public void focusGained(FocusEvent arg0) {}
});
txt.setText(variable.getValue().toString());
txt.setColumns(5);
txt.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.gridx++;
panel.add(txt, c);
variableMap.put(variable, txt);
return panel;
}
private JPanel makeUnidadeListPanel(Property_UnidadeList variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = -1;
c.gridx = 0;
c.gridwidth = 1;
Map<Unidade, TroopFormattedTextField> map = new HashMap<Unidade, TroopFormattedTextField>();
for (Unidade i : Unidade.values()) {
c.gridx = 0;
c.gridy++;
panel.add(new JLabel(i.nome()), c);
TroopFormattedTextField txt = new TroopFormattedTextField(9) {
public void go() {}
};
txt.setText(variable.get(i).toString());
txt.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.gridx = 1;
panel.add(txt, c);
map.put(i, txt);
}
variableMap.put(variable, map);
return panel;
}
/**
* Creates the default panel, with adequate colors, borders and layout
*/
private JPanel makeDefaultPanel() {
JPanel panel = new JPanel();
panel.setBackground(Cores.FUNDO_CLARO);
panel.setBorder(new LineBorder(Cores.SEPARAR_CLARO));
GridBagLayout layout = new GridBagLayout();
			layout.columnWidths = new int[] { 0, 0 }; // zero keeps the computed column widths
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
panel.setLayout(layout);
//Adds a listener to turn 'isSaved' false
return panel;
}
private void setSaved(boolean saved) {
isSaved = saved;
if (saved)
objectName.remove(unsavedSignal);
else
objectName.add(unsavedSignal,0);
objectName.revalidate();
objectName.repaint();
changeButtons();
}
public void setSelected(boolean isSelected) {
if (isSelected) {
objectInformation.setVisible(true);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.LOWERED));
objectName.setBackground(Cores.FUNDO_ESCURO);
selectedInterface = this;
changeButtons();
} else {
objectInformation.setVisible(false);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.RAISED));
objectName.setBackground(Cores.FUNDO_CLARO);
}
}
private void saveObejct() {
if (isUniqueName(nameTextField.getText())) {
for (Entry<Property, Object> i : variableMap.entrySet()) {
// Nome case
if (i.getKey().getClass().equals(Property_Nome.class)) {
// only does this if the name is different
if (!i.getKey().getName().equals(((JTextField) i.getValue()).getText())) {
i.getKey().setValue(((JTextField) i.getValue()).getText());
objectName.removeAll();
objectName.add(new JLabel(object.toString()));
objectName.revalidate();
objectName.repaint();
}
// Boolean case
} else if (i.getKey().getClass().equals(Property_Boolean.class)) {
i.getKey().setValue(((JCheckBox) i.getValue()).isSelected());
// Escolha case
} else if (i.getKey().getClass().equals(Property_Escolha.class)) {
Enumeration<AbstractButton> enumeration =
((ButtonGroup) i.getValue()).getElements();
while (enumeration.hasMoreElements()) {
AbstractButton b = enumeration.nextElement();
if (b.isSelected())
i.getKey().setValue(b.getText());
}
// Number case
} else if (i.getKey().getClass().equals(Property_Number.class)) {
i.getKey().setValue(((JTextField) i.getValue()).getText());
// UnidadeList case
} else if (i.getKey().getClass().equals(Property_UnidadeList.class)) {
Map<Unidade, BigDecimal> map = new HashMap<Unidade, BigDecimal>();
for (Entry<Unidade, TroopFormattedTextField> x
: ((HashMap<Unidade, TroopFormattedTextField>) i .getValue()).entrySet()) {
map.put(x.getKey(), x.getValue().getValue());
}
i.getKey().setValue(map);
} // else if (UnidadeList)
} // for (EntrySet)
setSaved(true);
} else {
String s = nameTextField.getText();
do {
// JOptionPane.showMessageDialog(null,
// nameTextField, "t", 1);
s = (String)(JOptionPane.showInputDialog(null,
new JLabel("<html>Esse nome j est sendo utilizado.<br>Favor escolher outro.</html>"),
"Nome j utilizado", JOptionPane.ERROR_MESSAGE, null, null, s));
} while (!isUniqueName(s));
if (s != null) {
nameTextField.setText(s);
saveObejct();
} else
setSaved(false);
// JOptionPane.showInputDialog(null);
// JOptionPane.showMessageDialog(null, "Esse nome j est sendo utilizado.\nFavor escolher outro.");
// nameTextField.requestFocus();
// nameTextField.selectAll();
}
}
private boolean isUniqueName(String s) {
for (Object o : objects)
if (o.toString().equals(s) && o != object)
return false;
return true;
}
private boolean isSaved() {
return isSaved;
}
public String toString(){
return object.toString();
}
}
public static void main(String args[]) {
File_Manager.read();
File_Manager.defineMundos();
Mundo_Reader.setMundoSelecionado(Mundo_Reader.getMundo(23));
File_Manager.defineModelos();
try {
EditDialog test = new EditDialog(Mundo_Reader.getMundoList(),
Mundo.class.getDeclaredField("variableList"));
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchFieldException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
| resources/custom_components/EditDialog.java | package custom_components;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.AbstractButton;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.border.LineBorder;
import javax.swing.border.MatteBorder;
import javax.swing.border.SoftBevelBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.PlainDocument;
import property_classes.Property;
import property_classes.Property_Boolean;
import property_classes.Property_Escolha;
import property_classes.Property_Nome;
import property_classes.Property_Number;
import property_classes.Property_UnidadeList;
import selecionar_mundo.GUI;
import config.File_Manager;
import config.Mundo_Reader;
import database.Cores;
import database.Mundo;
import database.Unidade;
/**
* Dialog that contains info to edit a list (Mundos and ModeloTropas)
*
*
*
* @author Arthur
*
*/
@SuppressWarnings({ "rawtypes", "unchecked", "serial" })
public class EditDialog extends JDialog {
List<Object> objects;
Map<Object, ArrayList<Property>> variableMap;
Field variableField;
// Used for setting the visibility of the information panels
List<ObjectInterface> interfaceList = new ArrayList<ObjectInterface>();
private ObjectInterface selectedInterface;
JPanel namePanel;
JScrollPane scroll;
int listNumber = 0;
JPanel informationPanel = new JPanel();
JButton saveButton, upButton, downButton;
//TODO not receive the Field, extract it from the objects
//TODO reduce time to run program
	// 1 object = 991 milliseconds
	// 56 objects = 2218 milliseconds
	// theoretically, construction without any objects takes 968.69 milliseconds
public EditDialog(List objects, Field variableField) {
this.objects = objects;
this.variableField = variableField;
variableMap = new HashMap<Object, ArrayList<Property>>();
for(Object o : objects)
try {
variableMap.put(o, (ArrayList<Property>)variableField.get(o));
} catch (IllegalArgumentException | IllegalAccessException e) {
e.printStackTrace();
}
setLayout(new GridBagLayout());
getContentPane().setBackground(Cores.ALTERNAR_ESCURO);
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.BOTH;
c.insets = new Insets(0,0,0,0);
makeScrollPanel(c);
for (Object o : objects){
createInterface(o);
addInterfaceToScroll(interfaceList.get(objects.indexOf(o)), listNumber++);
}
informationPanel.setBackground(Cores.ALTERNAR_ESCURO);
c.gridy++;
add(informationPanel,c);
c.gridy++;
c.gridwidth = 2;
// c.insets = new Insets(10,0,0,0);
add(makeEditPanel(), c);
interfaceList.get(0).setSelected(true);
pack();
setVisible(true);
setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
}
private void createInterface(Object o) {
ObjectInterface oi = new ObjectInterface(o, variableMap.get(o));
interfaceList.add(oi);
}
private void addInterfaceToScroll(ObjectInterface oi, int position) {
GridBagConstraints c = new GridBagConstraints();
c.fill = GridBagConstraints.HORIZONTAL;
c.anchor = GridBagConstraints.NORTH;
c.gridy = position;
namePanel.add(oi.objectName,c);
c.gridy = 0;
informationPanel.add(oi.objectInformation, c);
}
private void removeInterfaceFromScroll(ObjectInterface oi){
namePanel.remove(oi.objectName);
informationPanel.remove(oi.objectInformation);
}
private void makeScrollPanel(GridBagConstraints c) {
namePanel = new JPanel();
GridBagLayout layout = new GridBagLayout();
layout.columnWidths = new int[] { 140 };
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
namePanel.setLayout(layout);
scroll = new JScrollPane(namePanel);
scroll.setPreferredSize(new Dimension(160,
informationPanel.getPreferredSize().height));
add(scroll,c);
}
private JPanel makeEditPanel() {
JPanel panel = new JPanel();
panel.setBackground(Cores.FUNDO_CLARO);
panel.setBorder(new MatteBorder(3, 0, 0, 0, Cores.SEPARAR_ESCURO));
GridBagLayout layout = new GridBagLayout();
layout.columnWidths = new int[] { 160, informationPanel.getPreferredSize().width };
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
panel.setLayout(layout);
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(10,0,10,0);
c.gridy = 0;
c.gridx = 0;
JButton newButton = new JButton("Novo");
newButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
try {
Object obj = objects.get(0).getClass().newInstance();
variableMap.put(obj, (ArrayList<Property>)variableField.get(obj));
// Puts object in the list
objects.add(obj);
// Creates the interface for the object
createInterface(obj);
// Adds interface to the scroll
addInterfaceToScroll(interfaceList.get(interfaceList.size()-1),
interfaceList.size()-1);
pack();
scroll.getVerticalScrollBar().setValue(
scroll.getVerticalScrollBar().getMaximum());
selectedInterface.setSelected(false);
interfaceList.get(interfaceList.size()-1).setSelected(true);
selectedInterface.saveObejct();
} catch (InstantiationException | IllegalAccessException
| IllegalArgumentException e) {
e.printStackTrace();
}
}
});
panel.add(newButton,c);
JPanel rightPanel = new JPanel();
rightPanel.setOpaque(false);
saveButton = new JButton("Salvar");
saveButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
selectedInterface.saveObejct();
}
});
c.gridx++;
rightPanel.add(saveButton, c);
upButton = new JButton(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/up_arrow.png"))));
upButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
int position = objects.indexOf(selectedInterface.object);
if (position > 0) {
Collections.swap(objects, position, position-1);
Collections.swap(interfaceList, position, position-1);
removeInterfaceFromScroll(interfaceList.get(position));
removeInterfaceFromScroll(interfaceList.get(position-1));
addInterfaceToScroll(selectedInterface, position-1);
addInterfaceToScroll(interfaceList.get(position), position);
revalidate();
changeButtons();
}
}
});
rightPanel.add(upButton, c);
downButton = new JButton(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/down_arrow.png"))));
downButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
int position = objects.indexOf(selectedInterface.object);
if (position < interfaceList.size()-1) {
Collections.swap(objects, position, position+1);
Collections.swap(interfaceList, position, position+1);
removeInterfaceFromScroll(interfaceList.get(position));
removeInterfaceFromScroll(interfaceList.get(position+1));
addInterfaceToScroll(selectedInterface, position+1);
addInterfaceToScroll(interfaceList.get(position), position);
scroll.getVerticalScrollBar().setValue(32*38);
revalidate();
changeButtons();
}
}
});
c.gridx++;
rightPanel.add(downButton, c);
JButton deleteButton = new JButton("Delete");
c.gridx = 1;
panel.add(rightPanel,c);
return panel;
}
// Turns the save, up and down buttons on or off as needed
private void changeButtons() {
saveButton.setEnabled(!selectedInterface.isSaved());
upButton.setEnabled(interfaceList.indexOf(selectedInterface) != 0);
downButton.setEnabled(interfaceList.indexOf(selectedInterface)
!= interfaceList.size()-1);
}
private class ObjectInterface {
/**
* The object that this interface references.
* <br> All editing and saving of the object is done in the <class>Object
* Interface</class>.
*/
private Object object;
private JPanel objectName;
private JPanel objectInformation;
// JTextField that contains the name of the object. Needed because name is very important
private JTextField nameTextField;
// Boolean that says if the object has been saved after any changes
private boolean isSaved = true;
// The symbol to be added to the objectName when it is unsaved
private JLabel unsavedSignal = new JLabel(new ImageIcon(Toolkit.getDefaultToolkit().getImage(
GUI.class.getResource("/images/teste.png"))));
/**
* Maps the object's properties to an object that contains the property's value
* <br>This is the mapping that is made depending on the type of property:
* <br>
* <br> Nome : JTextField
* <br> Boolean : JCheckBox
* <br> Escolha : ButtonGroup
* <br> Number : JTextField
* <br> UnidadeList : HashMap<\Unidade, TroopFormattedTextField>
*/
private Map<Property, Object> variableMap = new HashMap<Property, Object>();
public ObjectInterface(Object object, List<Property> list) {
this.object = object;
createNamePanel(object.toString());
createInformationPanel(list);
}
private void createNamePanel(String s) {
objectName = new JPanel();
objectName.add(new JLabel(s));
objectName.setBackground(Cores.FUNDO_CLARO);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.RAISED));
objectName.addMouseListener(new MouseListener() {
public void mouseReleased(MouseEvent arg0) {}
public void mousePressed(MouseEvent arg0) {}
public void mouseExited(MouseEvent arg0) {}
public void mouseEntered(MouseEvent arg0) {}
public void mouseClicked(MouseEvent arg0) {
selectedInterface.setSelected(false);
setSelected(true);
}
});
}
private void createInformationPanel(List<Property> list) {
objectInformation = new JPanel(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(0, 10, 5, 10);
c.fill = GridBagConstraints.HORIZONTAL;
c.gridy = 0;
for (Property i : list) {
if (i.getClass().equals(Property_Nome.class)) {
objectInformation.add(makeNamePanel((Property_Nome) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Boolean.class)) {
objectInformation.add(makeBooleanPanel((Property_Boolean) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Escolha.class)) {
objectInformation.add(makeEscolhaPanel((Property_Escolha) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_Number.class)) {
objectInformation.add(makeNumberPanel((Property_Number) i), c);
c.gridy++;
} else if (i.getClass().equals(Property_UnidadeList.class)) {
objectInformation.add(makeUnidadeListPanel((Property_UnidadeList) i), c);
c.gridy++;
}
}
objectInformation.setBackground(Cores.ALTERNAR_ESCURO);
objectInformation.setVisible(false);
}
private JPanel makeNamePanel(Property_Nome variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
c.anchor = GridBagConstraints.WEST;
panel.add(new JLabel("Nome"), c);
nameTextField = new JTextField(16);
nameTextField.setText(variable.getName());
nameTextField.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.anchor = GridBagConstraints.EAST;
c.gridy++;
c.gridwidth = 2;
panel.add(nameTextField, c);
variableMap.put(variable, nameTextField);
return panel;
}
private JPanel makeBooleanPanel(Property_Boolean variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
JCheckBox checkBox = new JCheckBox();
checkBox.setOpaque(false);
checkBox.setSelected(variable.getValue());
checkBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
setSaved(false);
}
});
c.gridx++;
panel.add(checkBox, c);
variableMap.put(variable, checkBox);
return panel;
}
private JPanel makeEscolhaPanel(Property_Escolha variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
ButtonGroup buttonGroup = new ButtonGroup();
JPanel buttonPanel = new JPanel(new GridLayout(0,1));
buttonPanel.setOpaque(false);
for (String s : variable.getOptions()) {
JRadioButton button = new JRadioButton(s);
button.setOpaque(false);
button.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
setSaved(false);
}
});
if (variable.isOption(s))
button.setSelected(true);
buttonGroup.add(button);
buttonPanel.add(button);
}
// Code for retrieving the selected value
// Enumeration<AbstractButton> enumeration = buttonGroup.getElements();
//
// while (enumeration.hasMoreElements()) {
//
// AbstractButton b = enumeration.nextElement();
//
// if (b.isSelected())
// System.out.println(b.getText());
//
// }
c.gridx++;
panel.add(buttonPanel, c);
variableMap.put(variable, buttonGroup);
return panel;
}
private JPanel makeNumberPanel(Property_Number variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = 0;
c.gridx = 0;
c.gridwidth = 1;
JLabel name = new JLabel(variable.getName());
panel.add(name, c);
JTextField txt = new JTextField(variable.getValue().toString());
txt.setDocument(new PlainDocument() {
@Override
public void insertString(int offset, String str, AttributeSet attr)
throws BadLocationException {
if (str == null)
return;
// Only does anything if it is a comma, period or number
if (str.charAt(0) == '.' || str.charAt(0) == ',' ||
Character.isDigit(str.charAt(0))) {
						// If it is a comma or period, check if it already has one
if (!Character.isDigit(str.charAt(0)) &&
super.getText(0, getLength()).contains(".")) {}
else
// If it does not and the inserted is a comma, add a period
if (str.charAt(0) == ',')
super.insertString(offset, ".", attr);
else
// Else, add the inserted character (period or number)
super.insertString(offset, str, attr);
}
}
});
txt.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent f) {
if (((JTextField) f.getSource()).getText().equals("")) {
((JTextField) f.getSource()).setText("1");
}
}
public void focusGained(FocusEvent arg0) {}
});
txt.setText(variable.getValue().toString());
txt.setColumns(5);
txt.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.gridx++;
panel.add(txt, c);
variableMap.put(variable, txt);
return panel;
}
private JPanel makeUnidadeListPanel(Property_UnidadeList variable) {
JPanel panel = makeDefaultPanel();
GridBagConstraints c = new GridBagConstraints();
c.insets = new Insets(5, 5, 5, 5);
c.gridy = -1;
c.gridx = 0;
c.gridwidth = 1;
Map<Unidade, TroopFormattedTextField> map = new HashMap<Unidade, TroopFormattedTextField>();
for (Unidade i : Unidade.values()) {
c.gridx = 0;
c.gridy++;
panel.add(new JLabel(i.nome()), c);
TroopFormattedTextField txt = new TroopFormattedTextField(9) {
public void go() {}
};
txt.setText(variable.get(i).toString());
txt.getDocument().addDocumentListener(new DocumentListener() {
public void removeUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void insertUpdate(DocumentEvent arg0) {
setSaved(false);
}
public void changedUpdate(DocumentEvent arg0) {}
});
c.gridx = 1;
panel.add(txt, c);
map.put(i, txt);
}
variableMap.put(variable, map);
return panel;
}
/**
* Creates the default panel, with adequate colors, borders and layout
*/
private JPanel makeDefaultPanel() {
JPanel panel = new JPanel();
panel.setBackground(Cores.FUNDO_CLARO);
panel.setBorder(new LineBorder(Cores.SEPARAR_CLARO));
GridBagLayout layout = new GridBagLayout();
			layout.columnWidths = new int[] { 0, 0 }; // zero keeps the computed column widths
layout.rowHeights = new int[] { 20 };
layout.columnWeights = new double[] { 1, Double.MIN_VALUE };
layout.rowWeights = new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 };
panel.setLayout(layout);
//Adds a listener to turn 'isSaved' false
return panel;
}
private void setSaved(boolean saved) {
isSaved = saved;
if (saved)
objectName.remove(unsavedSignal);
else
objectName.add(unsavedSignal,0);
objectName.revalidate();
objectName.repaint();
changeButtons();
}
public void setSelected(boolean isSelected) {
if (isSelected) {
objectInformation.setVisible(true);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.LOWERED));
objectName.setBackground(Cores.FUNDO_ESCURO);
selectedInterface = this;
changeButtons();
} else {
objectInformation.setVisible(false);
objectName.setBorder(new SoftBevelBorder(SoftBevelBorder.RAISED));
objectName.setBackground(Cores.FUNDO_CLARO);
}
}
private void saveObejct() {
if (isUniqueName(nameTextField.getText())) {
for (Entry<Property, Object> i : variableMap.entrySet()) {
// Nome case
if (i.getKey().getClass().equals(Property_Nome.class)) {
// only does this if the name is different
if (!i.getKey().getName().equals(((JTextField) i.getValue()).getText())) {
i.getKey().setValue(((JTextField) i.getValue()).getText());
objectName.removeAll();
objectName.add(new JLabel(object.toString()));
objectName.revalidate();
objectName.repaint();
}
// Boolean case
} else if (i.getKey().getClass().equals(Property_Boolean.class)) {
i.getKey().setValue(((JCheckBox) i.getValue()).isSelected());
// Escolha case
} else if (i.getKey().getClass().equals(Property_Escolha.class)) {
Enumeration<AbstractButton> enumeration =
((ButtonGroup) i.getValue()).getElements();
while (enumeration.hasMoreElements()) {
AbstractButton b = enumeration.nextElement();
if (b.isSelected())
i.getKey().setValue(b.getText());
}
// Number case
} else if (i.getKey().getClass().equals(Property_Number.class)) {
i.getKey().setValue(((JTextField) i.getValue()).getText());
// UnidadeList case
} else if (i.getKey().getClass().equals(Property_UnidadeList.class)) {
Map<Unidade, BigDecimal> map = new HashMap<Unidade, BigDecimal>();
for (Entry<Unidade, TroopFormattedTextField> x
: ((HashMap<Unidade, TroopFormattedTextField>) i .getValue()).entrySet()) {
map.put(x.getKey(), x.getValue().getValue());
}
i.getKey().setValue(map);
} // else if (UnidadeList)
} // for (EntrySet)
setSaved(true);
} else {
String s = nameTextField.getText();
do {
// JOptionPane.showMessageDialog(null,
// nameTextField, "t", 1);
s = (String)(JOptionPane.showInputDialog(null,
new JLabel("<html>Esse nome j est sendo utilizado.<br>Favor escolher outro.</html>"),
"Nome j utilizado", JOptionPane.ERROR_MESSAGE, null, null, s));
} while (!isUniqueName(s));
if (s != null) {
nameTextField.setText(s);
saveObejct();
} else
setSaved(false);
// JOptionPane.showInputDialog(null);
// JOptionPane.showMessageDialog(null, "Esse nome j est sendo utilizado.\nFavor escolher outro.");
// nameTextField.requestFocus();
// nameTextField.selectAll();
}
}
private boolean isUniqueName(String s) {
for (Object o : objects)
if (o.toString().equals(s) && o != object)
return false;
return true;
}
private boolean isSaved() {
return isSaved;
}
public String toString(){
return object.toString();
}
}
public static void main(String args[]) {
File_Manager.read();
File_Manager.defineMundos();
Mundo_Reader.setMundoSelecionado(Mundo_Reader.getMundo(23));
File_Manager.defineModelos();
try {
EditDialog test = new EditDialog(Mundo_Reader.getMundoList(),
Mundo.class.getDeclaredField("variableList"));
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchFieldException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
| Addded Delete Button
| resources/custom_components/EditDialog.java | Addded Delete Button | <ide><path>esources/custom_components/EditDialog.java
<ide>
<ide> JButton deleteButton = new JButton("Delete");
<ide>
<add> deleteButton.addActionListener(new ActionListener() {
<add>
<add> public void actionPerformed(ActionEvent arg0) {
<add>
<add> //TODO change text
<add> int delete = JOptionPane.showConfirmDialog(null,
<add> new JLabel("Deseja deletar para sempre?"),
<add> "Confirmar", JOptionPane.YES_NO_OPTION);
<add>
<add> if (delete == JOptionPane.YES_OPTION) {
<add>
<add> int position = interfaceList.indexOf(selectedInterface);
<add>
<add> removeInterfaceFromScroll(selectedInterface);
<add>
<add> interfaceList.remove(selectedInterface);
<add> objects.remove(selectedInterface.object);
<add>
<add> for (int i = position; i < interfaceList.size(); i++) {
<add> removeInterfaceFromScroll(interfaceList.get(i));
<add> addInterfaceToScroll(interfaceList.get(i), i);
<add> }
<add>
<add>                 // If the last object was deleted, select the
<add>                 // previous one. Otherwise, the next one
<add> if (position == interfaceList.size())
<add> interfaceList.get(position-1).setSelected(true);
<add> else
<add> interfaceList.get(position).setSelected(true);
<add>
<add> changeButtons();
<add>
<add> }
<add>
<add> }
<add> });
<add>
<add> c.gridx++;
<add> rightPanel.add(deleteButton, c);
<add>
<ide> c.gridx = 1;
<ide> panel.add(rightPanel,c);
<ide> |
|
Java | mit | e51886bb3fce486cfa59a53638a5192a9f4105af | 0 | AndrewKostousov/kload | import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.rapidoid.net.Server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Properties;
public class HttpGateEntryPoint {
public static void main(String[] args) throws Exception {
Properties props = new Properties();
props.put("bootstrap.servers", "edi15:9092");
props.put("acks", "0");
props.put("retries", 0);
props.put("linger.ms", 20);
props.put("batch.size", 64 * 1000);
props.put("buffer.memory", 1000 * 1000 * 1000);
props.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("schema.registry.url", "http://edi15:8881");
String schemaString = "{\"type\": \"record\", " +
"\"name\": \"kevent\"," +
"\"fields\": [" +
// "{\"name\": \"timestamp\", \"type\": \"long\"}," +
"{\"name\": \"payload\", \"type\": \"bytes\"}" +
"]}";
Producer<String, GenericRecord> producer = new KafkaProducer<>(props);
Schema.Parser parser = new Schema.Parser();
Schema schema = parser.parse(schemaString);
Server server = new KHttpServer(schema, producer).listen(8888);
new BufferedReader(new InputStreamReader(System.in)).readLine();
server.shutdown();
producer.close();
}
} | src/main/java/HttpGateEntryPoint.java | import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.rapidoid.net.Server;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Properties;
public class HttpGateEntryPoint {
public static void main(String[] args) throws Exception {
Properties props = new Properties();
props.put("bootstrap.servers", "edi15:9092");
props.put("acks", "0");
props.put("retries", 0);
props.put("linger.ms", 20);
props.put("batch.size", 64 * 1000);
props.put("buffer.memory", 512 * 1000 * 1000);
props.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
props.put("schema.registry.url", "http://edi15:8881");
String schemaString = "{\"type\": \"record\", " +
"\"name\": \"kevent\"," +
"\"fields\": [" +
// "{\"name\": \"timestamp\", \"type\": \"long\"}," +
"{\"name\": \"payload\", \"type\": \"bytes\"}" +
"]}";
Producer<String, GenericRecord> producer = new KafkaProducer<>(props);
Schema.Parser parser = new Schema.Parser();
Schema schema = parser.parse(schemaString);
Server server = new KHttpServer(schema, producer).listen(8888);
new BufferedReader(new InputStreamReader(System.in)).readLine();
server.shutdown();
producer.close();
}
} | tuning
| src/main/java/HttpGateEntryPoint.java | tuning | <ide><path>rc/main/java/HttpGateEntryPoint.java
<ide> props.put("retries", 0);
<ide> props.put("linger.ms", 20);
<ide> props.put("batch.size", 64 * 1000);
<del> props.put("buffer.memory", 512 * 1000 * 1000);
<add> props.put("buffer.memory", 1000 * 1000 * 1000);
<ide> props.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
<ide> props.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
<ide> props.put("schema.registry.url", "http://edi15:8881"); |
|
Java | lgpl-2.1 | 98ef7f7141747b3a88fe786b1f4d16a457075db9 | 0 | OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs | package opendap.experiments;
import com.ontotext.trree.owlim_ext.SailImpl;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.ntriples.NTriplesWriter;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.trix.TriXWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
/**
* Created by IntelliJ IDEA.
* User: ndp
* Date: Aug 18, 2010
* Time: 1:39:37 PM
* To change this template use File | Settings | File Templates.
*/
public class RepoTest {
public static void main(String[] args) {
long startTime, endTime;
double elapsedTime;
String workingDir = System.getProperty("user.dir");
System.out.println("Current directory: "+workingDir);
startTime = new Date().getTime();
try {
SailRepository repo = setupRepository(workingDir);
loadStatements(repo,"test.trig");
repo.shutDown();
repo = setupRepository(workingDir);
System.out.println(showContexts(repo));
dropStatement(repo);
dumpRepository(repo,"test_drop.trig");
System.out.println(showContexts(repo));
repo.shutDown();
} catch (Exception e) {
System.err.println("Caught " + e.getClass().getName() + " in main(): "
+ e.getMessage());
e.printStackTrace(System.err);
}
finally {
endTime = new Date().getTime();
elapsedTime = (endTime - startTime) / 1000.0;
System.out.println("Elapsed Time: "+elapsedTime+"s");
}
}
private static SailRepository setupRepository(String cacheDir) throws RepositoryException {
String repositoryStorage = "owlim-storage";
System.out.println("Setting up Semantic Repository.");
SailImpl owlimSail = new com.ontotext.trree.owlim_ext.SailImpl();
SailRepository repo = new SailRepository(owlimSail);
System.out.println("Configuring Semantic Repository.");
File storageDir = new File(cacheDir);
owlimSail.setDataDir(storageDir);
System.out.println("Semantic Repository Data directory set to: " + cacheDir);
owlimSail.setParameter("storage-folder", repositoryStorage);
System.out.println("Semantic Repository 'storage-folder' set to: " + repositoryStorage);
String ruleSet;
ruleSet = "owl-horst";
owlimSail.setParameter("ruleset", ruleSet);
System.out.println("Semantic Repository 'ruleset' set to: " + ruleSet);
System.out.println("Initializing Semantic Repository.");
repo.initialize();
System.out.println("Semantic Repository Ready.");
return repo;
}
private static void loadStatements(SailRepository repo, String rdfFileName) throws RepositoryException, IOException, RDFParseException {
RepositoryConnection con = null;
try {
con = repo.getConnection();
File rdfFile = new File(rdfFileName);
con.add(rdfFile,"http://someBaseURI#",RDFFormat.TRIG);
con.commit();
}
finally {
if(con!=null) {
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
private static void dropStatement(SailRepository repo) throws RepositoryException {
ValueFactory valueFactory = repo.getValueFactory();
String rdfType = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
RepositoryConnection con = null;
String startingPoint = "http://test.opendap.org:8090/opendap/ioos/ECMWF_ERA-40_subset.ncml.rdf" ;
URI startingPointValue = valueFactory.createURI(startingPoint);
URI isa = valueFactory.createURI(rdfType);
URI startingPointsContext = valueFactory.createURI("http://iridl.ldeo.columbia.edu/ontologies/rdfcache.owl#startingPoints");
URI startingPointType = valueFactory.createURI("http://iridl.ldeo.columbia.edu/ontologies/rdfcache.owl#StartingPoint");
try {
con = repo.getConnection();
con.remove(startingPointValue, isa, startingPointType, startingPointsContext);
con.commit();
System.out.println("Removed starting point " + startingPoint + " from the repository. (N-Triple: <" + startingPointValue + "> <" + isa
+ "> " + "<" + startingPointType + "> " + "<" + startingPointsContext + "> )");
}
finally {
if(con!=null) {
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
public static String showContexts(SailRepository repository) throws RepositoryException {
RepositoryConnection con = null;
String msg;
try {
con = repository.getConnection();
msg = showContexts(con);
} finally {
if(con!=null){
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
return msg;
}
public static String showContexts(RepositoryConnection con) throws RepositoryException {
String msg = "\nRepository ContextIDs:\n";
RepositoryResult<Resource> contextIds = con.getContextIDs();
for(Resource contextId : contextIds.asList()){
msg += " "+contextId+"\n";
}
return msg;
}
public static void dumpRepository(RepositoryConnection con, String filename) {
// export repository to an n-triple file
File outrps = new File(filename); // hard copy of repository
try {
System.out.println("\nDumping repository to: '"+filename+"' ");
FileOutputStream myFileOutputStream = new FileOutputStream(outrps);
if (filename.endsWith("nt")) {
NTriplesWriter myNTRiplesWriter = new NTriplesWriter(
myFileOutputStream);
con.export(myNTRiplesWriter);
myNTRiplesWriter.startRDF();
myNTRiplesWriter.endRDF();
}
if (filename.endsWith("trix")) {
TriXWriter myTriXWriter = new TriXWriter(myFileOutputStream);
con.export(myTriXWriter);
myTriXWriter.startRDF();
myTriXWriter.endRDF();
}
if (filename.endsWith("trig")) {
TriGWriter myTriGWriter = new TriGWriter(myFileOutputStream);
con.export(myTriGWriter);
myTriGWriter.startRDF();
myTriGWriter.endRDF();
}
} catch (Exception e) {
System.err.println("Failed to dump repository! msg: "+e.getMessage());
}
}
public static void dumpRepository(SailRepository owlse2, String filename) throws RepositoryException {
RepositoryConnection con = null;
try {
con = owlse2.getConnection();
dumpRepository(con, filename);
} finally {
if(con!=null){
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
}
| src/opendap/experiments/RepoTest.java | package opendap.experiments;
import com.ontotext.trree.owlim_ext.SailImpl;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.repository.sail.SailRepository;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.openrdf.rio.ntriples.NTriplesWriter;
import org.openrdf.rio.trig.TriGWriter;
import org.openrdf.rio.trix.TriXWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Date;
/**
* Created by IntelliJ IDEA.
* User: ndp
* Date: Aug 18, 2010
* Time: 1:39:37 PM
* To change this template use File | Settings | File Templates.
*/
public class RepoTest {
public static void main(String[] args) {
long startTime, endTime;
double elapsedTime;
String workingDir = System.getProperty("user.dir");
System.out.println("Current directory: "+workingDir);
startTime = new Date().getTime();
try {
SailRepository repo = setupRepository(workingDir);
loadStatements(repo,"test.trig");
System.out.println(showContexts(repo));
dropStatement(repo);
dumpRepository(repo,"test_drop.trig");
System.out.println(showContexts(repo));
} catch (Exception e) {
System.err.println("Caught " + e.getClass().getName() + " in main(): "
+ e.getMessage());
e.printStackTrace(System.err);
}
finally {
endTime = new Date().getTime();
elapsedTime = (endTime - startTime) / 1000.0;
System.out.println("Elapsed Time: "+elapsedTime+"s");
}
}
private static SailRepository setupRepository(String cacheDir) throws RepositoryException {
String repositoryStorage = "owlim-storage";
System.out.println("Setting up Semantic Repository.");
SailImpl owlimSail = new com.ontotext.trree.owlim_ext.SailImpl();
SailRepository repo = new SailRepository(owlimSail);
System.out.println("Configuring Semantic Repository.");
File storageDir = new File(cacheDir);
owlimSail.setDataDir(storageDir);
System.out.println("Semantic Repository Data directory set to: " + cacheDir);
owlimSail.setParameter("storage-folder", repositoryStorage);
System.out.println("Semantic Repository 'storage-folder' set to: " + repositoryStorage);
String ruleSet;
ruleSet = "owl-horst";
owlimSail.setParameter("ruleset", ruleSet);
System.out.println("Semantic Repository 'ruleset' set to: " + ruleSet);
System.out.println("Initializing Semantic Repository.");
repo.initialize();
System.out.println("Semantic Repository Ready.");
return repo;
}
private static void loadStatements(SailRepository repo, String rdfFileName) throws RepositoryException, IOException, RDFParseException {
RepositoryConnection con = null;
try {
con = repo.getConnection();
File rdfFile = new File(rdfFileName);
con.add(rdfFile,"http://someBaseURI#",RDFFormat.TRIG);
con.commit();
}
finally {
if(con!=null) {
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
private static void dropStatement(SailRepository repo) throws RepositoryException {
ValueFactory valueFactory = repo.getValueFactory();
String rdfType = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
RepositoryConnection con = null;
String startingPoint = "http://test.opendap.org:8090/opendap/ioos/ECMWF_ERA-40_subset.ncml.rdf" ;
URI startingPointValue = valueFactory.createURI(startingPoint);
URI isa = valueFactory.createURI(rdfType);
URI startingPointsContext = valueFactory.createURI("http://iridl.ldeo.columbia.edu/ontologies/rdfcache.owl#startingPoints");
URI startingPointType = valueFactory.createURI("http://iridl.ldeo.columbia.edu/ontologies/rdfcache.owl#StartingPoint");
try {
con = repo.getConnection();
con.remove(startingPointValue, isa, startingPointType, startingPointsContext);
con.commit();
System.out.println("Removed starting point " + startingPoint + " from the repository. (N-Triple: <" + startingPointValue + "> <" + isa
+ "> " + "<" + startingPointType + "> " + "<" + startingPointsContext + "> )");
}
finally {
if(con!=null) {
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
public static String showContexts(SailRepository repository) throws RepositoryException {
RepositoryConnection con = null;
String msg;
try {
con = repository.getConnection();
msg = showContexts(con);
} finally {
if(con!=null){
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
return msg;
}
public static String showContexts(RepositoryConnection con) throws RepositoryException {
String msg = "\nRepository ContextIDs:\n";
RepositoryResult<Resource> contextIds = con.getContextIDs();
for(Resource contextId : contextIds.asList()){
msg += " "+contextId+"\n";
}
return msg;
}
public static void dumpRepository(RepositoryConnection con, String filename) {
// export repository to an n-triple file
File outrps = new File(filename); // hard copy of repository
try {
System.out.println("\nDumping repository to: '"+filename+"' ");
FileOutputStream myFileOutputStream = new FileOutputStream(outrps);
if (filename.endsWith("nt")) {
NTriplesWriter myNTRiplesWriter = new NTriplesWriter(
myFileOutputStream);
con.export(myNTRiplesWriter);
myNTRiplesWriter.startRDF();
myNTRiplesWriter.endRDF();
}
if (filename.endsWith("trix")) {
TriXWriter myTriXWriter = new TriXWriter(myFileOutputStream);
con.export(myTriXWriter);
myTriXWriter.startRDF();
myTriXWriter.endRDF();
}
if (filename.endsWith("trig")) {
TriGWriter myTriGWriter = new TriGWriter(myFileOutputStream);
con.export(myTriGWriter);
myTriGWriter.startRDF();
myTriGWriter.endRDF();
}
} catch (Exception e) {
System.err.println("Failed to dump repository! msg: "+e.getMessage());
}
}
public static void dumpRepository(SailRepository owlse2, String filename) throws RepositoryException {
RepositoryConnection con = null;
try {
con = owlse2.getConnection();
dumpRepository(con, filename);
} finally {
if(con!=null){
try {
con.close(); //close connection first
}
catch(RepositoryException e){
System.err.println("Failed to close repository connection. Msg: "+e.getMessage());
}
}
}
}
}
| olfs:
| src/opendap/experiments/RepoTest.java | olfs: | <ide><path>rc/opendap/experiments/RepoTest.java
<ide> try {
<ide>
<ide> SailRepository repo = setupRepository(workingDir);
<del>
<ide> loadStatements(repo,"test.trig");
<del>
<add> repo.shutDown();
<add>
<add> repo = setupRepository(workingDir);
<add>
<ide> System.out.println(showContexts(repo));
<ide>
<ide> dropStatement(repo);
<ide>
<ide> System.out.println(showContexts(repo));
<ide>
<del>
<del>
<add> repo.shutDown();
<ide>
<ide> } catch (Exception e) {
<ide> System.err.println("Caught " + e.getClass().getName() + " in main(): " |
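Editor's note on the commit above: the diff restarts the OWLIM-backed repository (repo.shutDown() followed by a fresh setupRepository(...)) between loading the TriG file and querying contexts, and shuts it down again before exiting. Separately from that change, every method in RepoTest.java repeats the same connection-cleanup boilerplate in its finally block; a small helper along these lines (purely a sketch, not part of the original file) would remove the duplication:

    // Sketch only: close a RepositoryConnection without letting cleanup errors hide the original failure.
    private static void closeQuietly(RepositoryConnection con) {
        if (con == null) return;
        try {
            con.close();
        } catch (RepositoryException e) {
            System.err.println("Failed to close repository connection. Msg: " + e.getMessage());
        }
    }

Each finally block would then shrink to a single closeQuietly(con) call.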
|
Java | mit | 4307aa1095cb5b7420e6f8f799f4ca0f8c5f0d7f | 0 | McJty/RFTools | package mcjty.rftools.blocks.screens.modulesclient;
import mcjty.rftools.api.screens.*;
import mcjty.rftools.api.screens.data.IModuleDataBoolean;
import mcjty.rftools.blocks.screens.modulesclient.helper.ScreenTextHelper;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
public class RedstoneClientScreenModule implements IClientScreenModule<IModuleDataBoolean> {
private String line = "";
private String yestext = "on";
private String notext = "off";
private int color = 0xffffff;
private int yescolor = 0xffffff;
private int nocolor = 0xffffff;
private int dim = 0;
private ITextRenderHelper labelCache = new ScreenTextHelper();
@Override
public TransformMode getTransformMode() {
return TransformMode.TEXT;
}
@Override
public int getHeight() {
return 10;
}
@Override
public void render(IModuleRenderHelper renderHelper, FontRenderer fontRenderer, int currenty, IModuleDataBoolean screenData, ModuleRenderInfo renderInfo) {
GlStateManager.disableLighting();
int xoffset;
if (!line.isEmpty()) {
labelCache.setup(line, 160, renderInfo);
labelCache.renderText(0, currenty, color, renderInfo);
xoffset = 7 + 40;
} else {
xoffset = 7;
}
String text;
int col;
if (screenData != null) {
boolean rs = screenData.get();
text = rs ? yestext : notext;
col = rs ? yescolor : nocolor;
} else {
text = "<invalid>";
col = 0xff0000;
}
renderHelper.renderText(xoffset, currenty, col, renderInfo, text);
}
@Override
public void mouseClick(World world, int x, int y, boolean clicked) {
}
@Override
public void createGui(IModuleGuiBuilder guiBuilder) {
guiBuilder
.label("Label:").text("text", "Label text").color("color", "Color for the label").nl()
.label("Yes:").text("yestext", "Positive text").color("yescolor", "Color for the positive text").nl()
.label("No:").text("notext", "Negative text").color("nocolor", "Color for the negative text").nl()
.choices("align", "Label alignment", "Left", "Center", "Right").nl()
.label("Block:").block("monitor").nl();
}
@Override
public void setupFromNBT(NBTTagCompound tagCompound, int dim, BlockPos pos) {
if (tagCompound != null) {
line = tagCompound.getString("text");
if (tagCompound.hasKey("yestext")) {
yestext = tagCompound.getString("yestext");
}
if (tagCompound.hasKey("notext")) {
notext = tagCompound.getString("notext");
}
if (tagCompound.hasKey("color")) {
color = tagCompound.getInteger("color");
} else {
color = 0xffffff;
}
if (tagCompound.hasKey("yescolor")) {
yescolor = tagCompound.getInteger("yescolor");
} else {
yescolor = 0xffffff;
}
if (tagCompound.hasKey("nocolor")) {
nocolor = tagCompound.getInteger("nocolor");
} else {
nocolor = 0xffffff;
}
if (tagCompound.hasKey("align")) {
String alignment = tagCompound.getString("align");
labelCache.align(TextAlign.get(alignment));
} else {
labelCache.align(TextAlign.ALIGN_LEFT);
}
}
}
@Override
public boolean needsServerData() {
return true;
}
}
| src/main/java/mcjty/rftools/blocks/screens/modulesclient/RedstoneClientScreenModule.java | package mcjty.rftools.blocks.screens.modulesclient;
import mcjty.rftools.api.screens.*;
import mcjty.rftools.api.screens.data.IModuleDataBoolean;
import mcjty.rftools.blocks.screens.modulesclient.helper.ScreenTextHelper;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
public class RedstoneClientScreenModule implements IClientScreenModule<IModuleDataBoolean> {
private String line = "";
private String yestext = "on";
private String notext = "off";
private int color = 0xffffff;
private int yescolor = 0xffffff;
private int nocolor = 0xffffff;
private int dim = 0;
private ITextRenderHelper labelCache = new ScreenTextHelper();
@Override
public TransformMode getTransformMode() {
return TransformMode.TEXT;
}
@Override
public int getHeight() {
return 10;
}
@Override
public void render(IModuleRenderHelper renderHelper, FontRenderer fontRenderer, int currenty, IModuleDataBoolean screenData, ModuleRenderInfo renderInfo) {
GlStateManager.disableLighting();
int xoffset;
if (!line.isEmpty()) {
labelCache.setup(line, 160, renderInfo);
labelCache.renderText(0, currenty, color, renderInfo);
xoffset = 7 + 40;
} else {
xoffset = 7;
}
String text;
int col;
if (screenData != null) {
boolean rs = screenData.get();
text = rs ? yestext : notext;
col = rs ? yescolor : nocolor;
} else {
text = "<invalid>";
col = 0xff0000;
}
if (renderInfo.font != null) {
float r = (col >> 16 & 255) / 255.0f;
float g = (col >> 8 & 255) / 255.0f;
float b = (col & 255) / 255.0f;
renderInfo.font.drawString(xoffset, 128 - currenty, text, 0.25f, 0.25f, -512f-40f, r, g, b, 1.0f);
} else {
fontRenderer.drawString(text, xoffset, currenty, col);
}
}
@Override
public void mouseClick(World world, int x, int y, boolean clicked) {
}
@Override
public void createGui(IModuleGuiBuilder guiBuilder) {
guiBuilder
.label("Label:").text("text", "Label text").color("color", "Color for the label").nl()
.label("Yes:").text("yestext", "Positive text").color("yescolor", "Color for the positive text").nl()
.label("No:").text("notext", "Negative text").color("nocolor", "Color for the negative text").nl()
.choices("align", "Label alignment", "Left", "Center", "Right").nl()
.label("Block:").block("monitor").nl();
}
@Override
public void setupFromNBT(NBTTagCompound tagCompound, int dim, BlockPos pos) {
if (tagCompound != null) {
line = tagCompound.getString("text");
if (tagCompound.hasKey("yestext")) {
yestext = tagCompound.getString("yestext");
}
if (tagCompound.hasKey("notext")) {
notext = tagCompound.getString("notext");
}
if (tagCompound.hasKey("color")) {
color = tagCompound.getInteger("color");
} else {
color = 0xffffff;
}
if (tagCompound.hasKey("yescolor")) {
yescolor = tagCompound.getInteger("yescolor");
} else {
yescolor = 0xffffff;
}
if (tagCompound.hasKey("nocolor")) {
nocolor = tagCompound.getInteger("nocolor");
} else {
nocolor = 0xffffff;
}
if (tagCompound.hasKey("align")) {
String alignment = tagCompound.getString("align");
labelCache.align(TextAlign.get(alignment));
} else {
labelCache.align(TextAlign.ALIGN_LEFT);
}
}
}
@Override
public boolean needsServerData() {
return true;
}
}
| Forgot
| src/main/java/mcjty/rftools/blocks/screens/modulesclient/RedstoneClientScreenModule.java | Forgot | <ide><path>rc/main/java/mcjty/rftools/blocks/screens/modulesclient/RedstoneClientScreenModule.java
<ide> text = "<invalid>";
<ide> col = 0xff0000;
<ide> }
<del> if (renderInfo.font != null) {
<del> float r = (col >> 16 & 255) / 255.0f;
<del> float g = (col >> 8 & 255) / 255.0f;
<del> float b = (col & 255) / 255.0f;
<del> renderInfo.font.drawString(xoffset, 128 - currenty, text, 0.25f, 0.25f, -512f-40f, r, g, b, 1.0f);
<del> } else {
<del> fontRenderer.drawString(text, xoffset, currenty, col);
<del> }
<add> renderHelper.renderText(xoffset, currenty, col, renderInfo, text);
<ide> }
<ide>
<ide> @Override |
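Editor's note: the deleted branch above unpacked a packed 0xRRGGBB colour into per-channel floats before calling renderInfo.font.drawString(...); the replacement delegates all of that to renderHelper.renderText(...). For reference, the unpacking the old code performed was simply:

    // Illustration of the removed colour handling: split a packed 0xRRGGBB int into 0..1 float channels.
    static float[] unpackColor(int col) {
        float r = (col >> 16 & 255) / 255.0f;
        float g = (col >> 8 & 255) / 255.0f;
        float b = (col & 255) / 255.0f;
        return new float[] { r, g, b };
    }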
|
Java | mit | d990b9faeb2a57fa3e09858ee349f5d1d14c75e2 | 0 | SWE443-TeamRed/open-bank,SWE443-TeamRed/open-bank,SWE443-TeamRed/open-bank | /**
* Created by daniel on 4/11/17.
*/
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import spark.Request;
import spark.Response;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import static spark.Spark.*;
public class SparkServer {
private static final Logger logger = Logger.getLogger(SparkServer.class.getName());
static boolean BANKMODE = true;
static Transaction transaction = null;
static private JsonPersistency jsonPersistency;
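    // i is the next account number to hand out, j is the next user id; each is incremented after a successful creation.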
static int i = 1;
static int j = 1;
static Map<Integer, Account> accountMap;
static Bank bank;
public static void main(String[] args) {
Runtime.getRuntime().addShutdownHook(new ShutdownThread());
jsonPersistency = new JsonPersistency();
bank = jsonPersistency.bankFromJson("OpenBank");
if(bank == null) {
bank = new Bank().withBankName("OpenBank");
logger.info("Creating new Bank: " + bank.getBankName());
}
else
logger.info("Loaded bank: " + bank.getBankName());
apiLogSetup();
accountMapSetup();
before("/*", (q, a) -> logger.info("Received api call from ip: " + q.ip()
+ "\n\t" + q.pathInfo()
+ "\n\tMessage Content"
+ "\n\tcontentType: " + q.contentType()
+ "\n\theaders: " + q.headers().toString()
+ "\n\tquery params: " + q.queryParams().toString()));
get("/", (Request request, Response response) -> {
return "TeamRed OpenBank";
});
path("/admin", () -> {
get("", (Request request, Response response) -> {return "<HTML>\n" +
"<HEAD>\n" +
"<TITLE>Admin Page Place Holder</TITLE>\n" +
"</HEAD>\n" +
"<BODY BGCOLOR=\"FFFFFF\">\n" +
"<HR>\n" +
"This is a place holder for the admin page\n" +
"<H1>Header Example</H1>\n" +
"<H2>Medium Header Example</H2>\n" +
"<P>Paragraph Example\n" +
"<P><B>Bold Paragraph Example</B>\n" +
"<BR><B><I>This is a new sentence without a paragraph break, in bold italics.</I></B>\n" +
"<HR>\n" +
"</BODY>\n" +
"</HTML>";
});
path("/passwordReset", () -> {
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("newPassword")
&& request.queryParams().contains("id")) {
String newPassword = request.queryParams("newPassword");
String id = request.queryParams("id");
if(bank.findUserByID(id) != null) {
bank.findUserByID(id).withPassword(newPassword);
if (bank.findUserByID(id).getPassword().equals(newPassword))
responseJSON.put("request", "successful");
else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "password not successfully changed");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "user could not be found");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
});
path("/api", () -> {
path("/login", () -> {
get("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id")) {
String id = request.queryParams("id");
responseJSON.put("isUserLoggedIn", bank.findUserByID(id).isLoggedIn());
}
return responseJSON;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("username") && request.queryParams().contains("password")) {
String username = request.queryParams("username");
String password = request.queryParams("password");
String id = bank.Login(username, password);
if(id != null) {
bank.findUserByID(id).login(id, password);
if (bank.findUserByID(id).isLoggedIn()) {
responseJSON.put("authentication", true);
responseJSON.put("userID", id);
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason", "failed to login the user");
}
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason", "user could not be found");
}
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/logout", () -> {
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id")) {
String id = request.queryParams("id");
if (id != null) {
if(bank.findUserByID(id) != null) {
if (bank.findUserByID(id).logout())
responseJSON.put("request", "successful");
else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "failed to logout the user");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "user could not be found");
}
} else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "null user id");
}
}else {
responseJSON.put("authentication", false);
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/user", () -> {
get("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
responseJSON.put("userList", bank.getCustomerUser());
return responseJSON;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
String name = "";
String username = "";
String password = "";
String phoneNumber = "";
boolean isAdmin = false;
if(request.queryParams().contains("name")
&& request.queryParams().contains("username")
&& request.queryParams().contains("password")
&& request.queryParams().contains("isAdmin")
&& request.queryParams().contains("phoneNumber")) {
name = request.queryParams("name");
username = request.queryParams("username");
password = request.queryParams("password");
isAdmin = Boolean.parseBoolean(request.queryParams("isAdmin"));
phoneNumber = request.queryParams("phoneNumber");
User user = new User()
.withAccount()
.withName(name)
.withUsername(username)
.withPhone(phoneNumber)
.withUserID("" + j)
.withPassword(password)
.withIsAdmin(isAdmin);
bank.withCustomerUser(user);
j++;
if(bank.findUserByID(user.getUserID()) != null) {
logger.info("Added User: " + user.toString());
responseJSON.put("request", "successful");
responseJSON.put("userID", bank.findUserByID(user.getUserID()).getUserID());
}
else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank failed to create user");
}
}else {
responseJSON.put("request","failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/account", () -> {
get("", (Request request, Response response) -> {
JSONArray jsonArray = new JSONArray();
JSONObject accountJson = new JSONObject();
if (request.queryParams().contains("id")) {
String id = request.queryParams("id");
if(request.queryParams().contains("accountID")) {
int accountID = Integer.parseInt(request.queryParams("accountID"));
Account account = bank.findAccountByID(accountID);
if (account != null) {
accountJson.put("accountNumber", account.getAccountnum());
accountJson.put("accountType", account.getType());
accountJson.put("balance", account.getBalance());
jsonArray.add(accountJson);
} else {
accountJson.put("request", "failed");
accountJson.put("reason", "bank failed to locate account");
jsonArray.add(accountJson);
}
}else {
AccountSet accounts = bank.findUserByID(id).getAccount();
if(accounts != null) {
for (Account account : accounts) {
JSONObject multAccountJson = new JSONObject();
logger.info("Account: " + account.toString());
if (account != null) {
multAccountJson.put("accountNumber", account.getAccountnum());
multAccountJson.put("accountType", account.getType());
multAccountJson.put("balance", account.getBalance());
jsonArray.add(multAccountJson);
}
}
} else {
accountJson.put("request", "failed");
accountJson.put("reason","user with id " + id + " does not exist");
jsonArray.add(accountJson);
}
}
}else {
accountJson.put("request", "failed");
accountJson.put("reason","missing required parameters in body");
jsonArray.add(accountJson);
}
return jsonArray;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if (request.queryParams().contains("id")
&& request.queryParams().contains("accountType")
&& request.queryParams().contains("initialBalance")) {
String id = request.queryParams("id");
String accountType = request.queryParams("accountType");
Double initialBalance = Double.parseDouble(request.queryParams("initialBalance"));
AccountTypeEnum accountTypeEnum = null;
if(accountType.equals("CHECKING")) {
logger.info("Checking found");
accountTypeEnum = AccountTypeEnum.CHECKING;
} else if(accountType.equals("SAVINGS")) {
logger.info("Savings found");
accountTypeEnum = AccountTypeEnum.SAVINGS;
} else {
logger.info("None found");
}
if(bank.findUserByID(id) != null) {
Account account = bank.findUserByID(id).createAccount()
.withAccountnum(i)
.withOwner(bank.findUserByID(id))
.withType(accountTypeEnum)
.withBalance(initialBalance);
if (account != null) {
responseJSON.put("request", "successful");
responseJSON.put("accountNum", account.getAccountnum());
// responseJSON.put("test", account.toString());
i++;
} else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank failed to create account");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank id was null");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/transaction", () -> {
get("", (Request request, Response response) -> {
String id = "";
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id"))
{
id = request.queryParams("id");
} else {
responseJSON.put("request", "failed");
return responseJSON;
}
                        // org.json.simple.JSONObject extends HashMap, so put() returns the previous value (null), not the object itself
                        responseJSON.put("request", "success");
                        return responseJSON;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
double amount = 0;
int fromAccountId = 0;
int toAccountId = 0;
String transferType = "";
if(request.queryParams().contains("amount")
&& request.queryParams().contains("fromAccountId")
&& request.queryParams().contains("toAccountId")
&& request.queryParams().contains("transferType")) {
amount = Double.parseDouble(request.queryParams("amount"));
fromAccountId = Integer.parseInt(request.queryParams("fromAccountId"));
toAccountId = Integer.parseInt(request.queryParams("toAccountId"));
transferType = request.queryParams("transferType");
TransactionTypeEnum transactionTypeEnum = TransactionTypeEnum.valueOf(transferType);
logger.info("fromAccount: " + accountMap.get(fromAccountId).toString());
logger.info("toAccount: " + accountMap.get(toAccountId).toString());
// Transaction transaction = new Transaction()
// .withAmount(amount)
// .withCreationdate(new Date())
// .withFromAccount(accountMap.get(fromAccountId))
// .withToAccount(accountMap.get(toAccountId))
// .withTransType(transactionTypeEnum)
// .withTime(new Date());
Transaction transaction = accountMap.get(fromAccountId).createCredit()
.withAmount(amount)
.withCreationdate(new Date())
.withFromAccount(accountMap.get(fromAccountId))
.withToAccount(accountMap.get(toAccountId))
.withTransType(transactionTypeEnum)
                                .withTime(new Date());
accountMap.get(fromAccountId).transferToAccount(amount, accountMap.get(toAccountId), "");
responseJSON.put("request", "successful");
responseJSON.put("transaction", transaction.toString());
responseJSON.put("transactionFrom", transaction.getFromAccount().toString());
responseJSON.put("transactionTo", transaction.getToAccount().toString());
return responseJSON;
}
responseJSON.put("request", "failed");
return responseJSON;
});
});
});
}
private static void accountMapSetup() {
accountMap = new HashMap<>();
}
private static void apiLogSetup() {
try {
FileHandler fh;
fh = new FileHandler("api.log", true);
SimpleFormatter formatter = new SimpleFormatter();
fh.setFormatter(formatter);
logger.addHandler(fh);
logger.info("*************************************New Session Started*************************************");
} catch (IOException ioe) {};
}
static class ShutdownThread extends Thread {
ShutdownThread() {}
public void run() {
jsonPersistency.bankToJson(bank);
logger.info("*************************************Shutting Down Session*************************************");
}
}
} | OpenBankSparkServer/src/main/java/SparkServer.java | /**
* Created by daniel on 4/11/17.
*/
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import spark.Request;
import spark.Response;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.FileHandler;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import static spark.Spark.*;
public class SparkServer {
private static final Logger logger = Logger.getLogger(SparkServer.class.getName());
static boolean BANKMODE = true;
static Transaction transaction = null;
static private JsonPersistency jsonPersistency;
static int i = 1;
static int j = 1;
static Map<Integer, Account> accountMap;
static Bank bank;
public static void main(String[] args) {
Runtime.getRuntime().addShutdownHook(new ShutdownThread());
jsonPersistency = new JsonPersistency();
bank = jsonPersistency.bankFromJson("OpenBank");
if(bank == null) {
bank = new Bank().withBankName("OpenBank");
logger.info("Creating new Bank: " + bank.getBankName());
}
else
logger.info("Loaded bank: " + bank.getBankName());
apiLogSetup();
accountMapSetup();
before("/*", (q, a) -> logger.info("Received api call from ip: " + q.ip()
+ "\n\t" + q.pathInfo()
+ "\n\tMessage Content"
+ "\n\tcontentType: " + q.contentType()
+ "\n\theaders: " + q.headers().toString()
+ "\n\tquery params: " + q.queryParams().toString()));
get("/", (Request request, Response response) -> {
return "TeamRed OpenBank";
});
path("/admin", () -> {
get("", (Request request, Response response) -> {return "<HTML>\n" +
"<HEAD>\n" +
"<TITLE>Admin Page Place Holder</TITLE>\n" +
"</HEAD>\n" +
"<BODY BGCOLOR=\"FFFFFF\">\n" +
"<HR>\n" +
"This is a place holder for the admin page\n" +
"<H1>Header Example</H1>\n" +
"<H2>Medium Header Example</H2>\n" +
"<P>Paragraph Example\n" +
"<P><B>Bold Paragraph Example</B>\n" +
"<BR><B><I>This is a new sentence without a paragraph break, in bold italics.</I></B>\n" +
"<HR>\n" +
"</BODY>\n" +
"</HTML>";
});
path("/passwordReset", () -> {
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("newPassword")
&& request.queryParams().contains("id")) {
String newPassword = request.queryParams("newPassword");
String id = request.queryParams("id");
if(bank.findUserByID(id) != null) {
bank.findUserByID(id).withPassword(newPassword);
if (bank.findUserByID(id).getPassword().equals(newPassword))
responseJSON.put("request", "successful");
else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "password not successfully changed");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "user could not be found");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
});
path("/api", () -> {
path("/login", () -> {
get("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id")) {
String id = request.queryParams("id");
responseJSON.put("isUserLoggedIn", bank.findUserByID(id).isLoggedIn());
}
return responseJSON;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("username") && request.queryParams().contains("password")) {
String username = request.queryParams("username");
String password = request.queryParams("password");
String id = bank.Login(username, password);
if(id != null) {
bank.findUserByID(id).login(id, password);
if (bank.findUserByID(id).isLoggedIn()) {
responseJSON.put("authentication", true);
responseJSON.put("userID", id);
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason", "failed to login the user");
}
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason", "user could not be found");
}
} else {
responseJSON.put("authentication", false);
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/logout", () -> {
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id")) {
String id = request.queryParams("id");
if (id != null) {
if(bank.findUserByID(id) != null) {
if (bank.findUserByID(id).logout())
responseJSON.put("request", "successful");
else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "failed to logout the user");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "user could not be found");
}
} else {
responseJSON.put("request", "failed");
responseJSON.put("reason", "null user id");
}
}else {
responseJSON.put("authentication", false);
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/user", () -> {
get("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
responseJSON.put("userList", bank.getCustomerUser());
return responseJSON;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
String name = "";
String username = "";
String password = "";
String phoneNumber = "";
boolean isAdmin = false;
if(request.queryParams().contains("name")
&& request.queryParams().contains("username")
&& request.queryParams().contains("password")
&& request.queryParams().contains("isAdmin")
&& request.queryParams().contains("phoneNumber")) {
name = request.queryParams("name");
username = request.queryParams("username");
password = request.queryParams("password");
isAdmin = Boolean.parseBoolean(request.queryParams("isAdmin"));
phoneNumber = request.queryParams("phoneNumber");
User user = new User()
.withAccount()
.withName(name)
.withUsername(username)
.withPhone(phoneNumber)
.withUserID("" + j)
.withPassword(password)
.withIsAdmin(isAdmin);
bank.withCustomerUser(user);
j++;
if(bank.findUserByID(user.getUserID()) != null) {
logger.info("Added User: " + user.toString());
responseJSON.put("request", "successful");
responseJSON.put("userID", bank.findUserByID(user.getUserID()).getUserID());
}
else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank failed to create user");
}
}else {
responseJSON.put("request","failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/account", () -> {
get("", (Request request, Response response) -> {
JSONArray jsonArray = new JSONArray();
JSONObject accountJson = new JSONObject();
if (request.queryParams().contains("id")) {
String id = request.queryParams("id");
if(request.queryParams().contains("accountID")) {
int accountID = Integer.parseInt(request.queryParams("accountID"));
Account account = bank.findAccountByID(accountID);
if (account != null) {
accountJson.put("accountNumber", account.getAccountnum());
accountJson.put("accountType", account.getType());
accountJson.put("balance", account.getBalance());
jsonArray.add(accountJson);
} else {
accountJson.put("request", "failed");
accountJson.put("reason", "bank failed to locate account");
jsonArray.add(accountJson);
}
}else {
AccountSet accounts = bank.findUserByID(id).getAccount();
if(accounts != null) {
for (Account account : accounts) {
JSONObject multAccountJson = new JSONObject();
logger.info("Account: " + account.toString());
if (account != null) {
multAccountJson.put("accountNumber", account.getAccountnum());
multAccountJson.put("accountType", account.getType());
multAccountJson.put("balance", account.getBalance());
jsonArray.add(multAccountJson);
}
}
} else {
accountJson.put("request", "failed");
accountJson.put("reason","user with id " + id + " does not exist");
}
}
}else {
accountJson.put("request", "failed");
accountJson.put("reason","missing required parameters in body");
jsonArray.add(accountJson);
}
return jsonArray;
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
if (request.queryParams().contains("id")
&& request.queryParams().contains("accountType")
&& request.queryParams().contains("initialBalance")) {
String id = request.queryParams("id");
String accountType = request.queryParams("accountType");
Double initialBalance = Double.parseDouble(request.queryParams("initialBalance"));
AccountTypeEnum accountTypeEnum = null;
if(accountType.equals("CHECKING")) {
logger.info("Checking found");
accountTypeEnum = AccountTypeEnum.CHECKING;
} else if(accountType.equals("SAVINGS")) {
logger.info("Savings found");
accountTypeEnum = AccountTypeEnum.SAVINGS;
} else {
logger.info("None found");
}
if(bank.findUserByID(id) != null) {
Account account = bank.findUserByID(id).createAccount()
.withAccountnum(i)
.withOwner(bank.findUserByID(id))
.withType(accountTypeEnum)
.withBalance(initialBalance);
if (account != null) {
responseJSON.put("request", "successful");
responseJSON.put("accountNum", account.getAccountnum());
// responseJSON.put("test", account.toString());
i++;
} else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank failed to create account");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","bank id was null");
}
}else {
responseJSON.put("request", "failed");
responseJSON.put("reason","missing required parameters in body");
}
return responseJSON;
});
});
path("/transaction", () -> {
get("", (Request request, Response response) -> {
String id = "";
JSONObject responseJSON = new JSONObject();
if(request.queryParams().contains("id"))
{
id = request.queryParams("id");
} else {
responseJSON.put("request", "failed");
return responseJSON;
}
return responseJSON.put("request", "success");
});
post("", (Request request, Response response) -> {
JSONObject responseJSON = new JSONObject();
double amount = 0;
int fromAccountId = 0;
int toAccountId = 0;
String transferType = "";
if(request.queryParams().contains("amount")
&& request.queryParams().contains("fromAccountId")
&& request.queryParams().contains("toAccountId")
&& request.queryParams().contains("transferType")) {
amount = Double.parseDouble(request.queryParams("amount"));
fromAccountId = Integer.parseInt(request.queryParams("fromAccountId"));
toAccountId = Integer.parseInt(request.queryParams("toAccountId"));
transferType = request.queryParams("transferType");
TransactionTypeEnum transactionTypeEnum = TransactionTypeEnum.valueOf(transferType);
logger.info("fromAccount: " + accountMap.get(fromAccountId).toString());
logger.info("toAccount: " + accountMap.get(toAccountId).toString());
// Transaction transaction = new Transaction()
// .withAmount(amount)
// .withCreationdate(new Date())
// .withFromAccount(accountMap.get(fromAccountId))
// .withToAccount(accountMap.get(toAccountId))
// .withTransType(transactionTypeEnum)
// .withTime(new Date());
Transaction transaction = accountMap.get(fromAccountId).createCredit()
.withAmount(amount)
.withCreationdate(new Date())
.withFromAccount(accountMap.get(fromAccountId))
.withToAccount(accountMap.get(toAccountId))
.withTransType(transactionTypeEnum)
.withTime(new Date());;
accountMap.get(fromAccountId).transferToAccount(amount, accountMap.get(toAccountId), "");
responseJSON.put("request", "successful");
responseJSON.put("transaction", transaction.toString());
responseJSON.put("transactionFrom", transaction.getFromAccount().toString());
responseJSON.put("transactionTo", transaction.getToAccount().toString());
return responseJSON;
}
responseJSON.put("request", "failed");
return responseJSON;
});
});
});
}
private static void accountMapSetup() {
accountMap = new HashMap<>();
}
private static void apiLogSetup() {
try {
FileHandler fh;
fh = new FileHandler("api.log", true);
SimpleFormatter formatter = new SimpleFormatter();
fh.setFormatter(formatter);
logger.addHandler(fh);
logger.info("*************************************New Session Started*************************************");
} catch (IOException ioe) {};
}
static class ShutdownThread extends Thread {
ShutdownThread() {}
public void run() {
jsonPersistency.bankToJson(bank);
logger.info("*************************************Shutting Down Session*************************************");
}
}
} | Pushing to test
| OpenBankSparkServer/src/main/java/SparkServer.java | Pushing to test | <ide><path>penBankSparkServer/src/main/java/SparkServer.java
<ide> } else {
<ide> accountJson.put("request", "failed");
<ide> accountJson.put("reason","user with id " + id + " does not exist");
<add> jsonArray.add(accountJson);
<ide> }
<ide> }
<ide> }else { |
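Editor's note: the one-line fix above adds the missing jsonArray.add(accountJson), so the "user with id ... does not exist" branch actually returns its error object. The same two-field failure payload is rebuilt by hand throughout SparkServer; a tiny helper (a sketch only, not part of the project) would keep the branches consistent:

    // Sketch: build the standard { "request": "failed", "reason": ... } payload used by the handlers above.
    @SuppressWarnings("unchecked")
    private static JSONObject failure(String reason) {
        JSONObject json = new JSONObject();
        json.put("request", "failed");
        json.put("reason", reason);
        return json;
    }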
|
JavaScript | bsd-3-clause | 2fa16fe5664e74963f46cc8a988f74691e547c36 | 0 | aplazame/aplazame-js,aplazame/aplazame-js,aplazame/aplazame-js |
function _key (o, key, value) {
key.split('.').forEach(function (k, i, keys) {
if( i === keys.length - 1 ) {
o[k] = value;
} else {
o[k] = o[k] || {};
o = o[k];
}
});
}
function toCamelCase (text) {
return text.replace(/[_-](\w)/g, function (_matched, letter) {
return letter.toUpperCase();
});
}
function toUnderscoreCase (text) {
return text.replace(/-(\w)/g, function (_matched, letter) {
return '_' + letter;
}).replace(/([a-z])([A-Z])/g, function (_matched, a, b) {
return a + '_' + b;
}).toLowerCase();
}
function deserialize (querystring, decode, throw_errors) {
var result = {};
querystring.split('&').forEach(function (keyValue) {
var matched = keyValue.match(/(.*?)=(.*)/);
if( !matched ) {
if( !throw_errors ) return;
throw new Error('could not parse ' + keyValue);
}
_key(result, toUnderscoreCase(matched[1]), decode ? decodeURI(matched[2]) : matched[2] );
});
return result;
}
module.exports = {
deserialize: deserialize,
toUnderscoreCase: toUnderscoreCase,
toCamelCase: toCamelCase,
};
| src/tools/deserialize.js |
function _key (o, key, value) {
key.split('.').forEach(function (k, i, keys) {
if( i === keys.length - 1 ) {
o[k] = value;
} else {
o[k] = o[k] || {};
o = o[k];
}
});
}
function toCamelCase (text) {
return text.replace(/[_-](\w)/g, function (_matched, letter) {
return letter.toUpperCase();
});
}
function toUnderscoreCase (text) {
return text.replace(/-(\w)/g, function (_matched, letter) {
return '_' + letter;
}).replace(/([a-z])([A-Z])/g, function (_matched, a, b) {
return a + '_' + b;
}).toLowerCase();
}
function deserialize (querystring, decode) {
var result = {};
if( !querystring.trim() ) return;
querystring.split('&').forEach(function (keyValue) {
var matched = keyValue.match(/(.*?)=(.*)/);
if( !matched ) {
throw new Error('could not parse ' + keyValue);
}
_key(result, toUnderscoreCase(matched[1]), decode ? decodeURI(matched[2]) : matched[2] );
});
return result;
}
module.exports = {
deserialize: deserialize,
toUnderscoreCase: toUnderscoreCase,
toCamelCase: toCamelCase,
};
| fixed deserialize issue
| src/tools/deserialize.js | fixed deserialize issue | <ide><path>rc/tools/deserialize.js
<ide> }).toLowerCase();
<ide> }
<ide>
<del>function deserialize (querystring, decode) {
<add>function deserialize (querystring, decode, throw_errors) {
<ide>
<ide> var result = {};
<del>
<del> if( !querystring.trim() ) return;
<ide>
<ide> querystring.split('&').forEach(function (keyValue) {
<ide> var matched = keyValue.match(/(.*?)=(.*)/);
<ide>
<ide> if( !matched ) {
<add> if( !throw_errors ) return;
<ide> throw new Error('could not parse ' + keyValue);
<ide> }
<ide> |
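Editor's note: the change above makes deserialize() tolerant by default — an unparseable key=value pair is now skipped unless the new throw_errors flag is set, and the old early return for blank query strings (which yielded undefined instead of an object) is gone. The less obvious part is the nested-key trick in _key(), where "a.b.c" produces {a: {b: {c: value}}}. A rough Java analogue, written only to illustrate the idea (the project itself is JavaScript), could look like this:

    // Rough Java analogue of _key(): walk a dot-separated path, creating nested maps as needed.
    @SuppressWarnings("unchecked")
    static void setPath(java.util.Map<String, Object> target, String path, Object value) {
        String[] keys = path.split("\\.");
        java.util.Map<String, Object> current = target;
        for (int i = 0; i < keys.length - 1; i++) {
            Object next = current.get(keys[i]);
            if (!(next instanceof java.util.Map)) {      // mirrors `o[k] = o[k] || {}`
                next = new java.util.HashMap<String, Object>();
                current.put(keys[i], next);
            }
            current = (java.util.Map<String, Object>) next;
        }
        current.put(keys[keys.length - 1], value);
    }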
|
Java | apache-2.0 | 441624aaa0277a962a5806e18ac3dbd25e62fe26 | 0 | boundary/boundary-vmware | // Copyright 2014 Boundary, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.boundary.metrics.vmware.poller;
import static com.google.common.base.Preconditions.checkNotNull;
import java.net.URI;
import java.util.Map;
import javax.annotation.concurrent.Immutable;
import org.hibernate.validator.constraints.NotEmpty;
import com.boundary.metrics.vmware.client.metrics.Metric;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
/**
* Represents the information needed to get metrics from a vCenter or ESXi server
*
*/
@Immutable
public class MonitoredEntity {
private final URI uri;
private final String username;
private final String password;
private final String name;
/**
* Map of VMware performance counter full names to Boundary metric descriptions
*/
private final Map<String, Metric> vmMetrics;
/**
* Constructor
*
* @param uri URI to vCenter or ESXi server
* @param username name used to authenticate
* @param password password used to authenticate
* @param name Identifier of the monitored entity
*/
public MonitoredEntity(@JsonProperty("uri") URI uri,
@JsonProperty("username") String username,
@JsonProperty("password") String password,
@JsonProperty("name") String name) {
// Ensure that the user/password/uri are not null since they
// are required to connect to the monitored entity
this.username = checkNotNull(username);
this.password = checkNotNull(password);
this.uri = checkNotNull(uri);
this.name = checkNotNull(name);
		// Create a map of the performance counters we require to collect from
// the monitored entity
// TODO: Externalize the configuration of these counters to allow collection to be
// changed dynamically.
ImmutableMap.Builder<String,Metric> virtualMachineMetrics = ImmutableMap.builder();
virtualMachineMetrics.put("cpu.usage.AVERAGE",
new Metric("SYSTEM_CPU_USAGE_AVERAGE","CPU Average Utilization"));
virtualMachineMetrics.put("cpu.usage.MINIMUM",
new Metric("SYSTEM_CPU_USAGE_MINIMUM","CPU Minimum Utilization"));
virtualMachineMetrics.put("cpu.idle.SUMMATION",
new Metric("SYSTEM_CPU_IDLE_TOTAL","CPU Total Idle"));
virtualMachineMetrics.put("mem.active.MAXIMUM",
new Metric("SYSTEM_MEMORY_ACTIVE_MAXIMUM","Memory Maximum Active"));
virtualMachineMetrics.put("mem.consumed.AVERAGE",
new Metric("SYSTEM_MEMORY_CONSUMED_AVERAGE","Memory Average Consumed"));
virtualMachineMetrics.put("mem.swapused.MAXIMUM",
new Metric("SYSTEM_MEMORY_SWAP_USED_MAXIMUM", "Memory Swap Used Maximum"));
virtualMachineMetrics.put("disk.read.AVERAGE",
new Metric("SYSTEM_DISK_READ_AVERAGE", "Disk Read Average"));
virtualMachineMetrics.put("disk.write.AVERAGE",
new Metric("SYSTEM_DISK_WRITE_AVERAGE", "Disk Write Average"));
vmMetrics = virtualMachineMetrics.build();
}
/**
* Returns the URI endpoint associated with this monitored entity
*
* @return {@link URI}
*/
public URI getUri() {
return uri;
}
/**
* Returns the user name associated with the monitored entity
* @return {@link String}
*/
public String getUsername() {
return username;
}
/**
* Returns the password used to authenticate to the monitored entity
*
* @return {@link String}
*/
public String getPassword() {
return password;
}
/**
	 * Returns the metrics associated with this monitored entity
* @return {@link Map}
*/
public Map<String, Metric> getMetrics() {
return vmMetrics;
}
/**
* Returns the name of the metric client
*
* @return {@link String}
*/
public String getName() {
return name;
}
}
| src/main/java/com/boundary/metrics/vmware/poller/MonitoredEntity.java | // Copyright 2014 Boundary, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.boundary.metrics.vmware.poller;
import static com.google.common.base.Preconditions.checkNotNull;
import java.net.URI;
import java.util.Map;
import javax.annotation.concurrent.Immutable;
import org.hibernate.validator.constraints.NotEmpty;
import com.boundary.metrics.vmware.client.metrics.Metric;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
/**
* Represents the information needed to get metrics from a vCenter or ESXi server
*
*/
@Immutable
public class MonitoredEntity {
private final URI uri;
private final String username;
private final String password;
private final String name;
/**
* Map of VMware performance counter full names to Boundary metric descriptions
*/
private final Map<String, Metric> vmMetrics;
/**
* Constructor
*
* @param uri URI to vCenter or ESXi server
* @param username name used to authenticate
* @param password password used to authenticate
* @param name Identifier of the monitored entity
*/
public MonitoredEntity(@JsonProperty("uri") URI uri,
@JsonProperty("username") String username,
@JsonProperty("password") String password,
@JsonProperty("name") String name) {
// Ensure that the user/password/uri are not null since they
// are required to connect to the monitored entity
this.username = checkNotNull(username);
this.password = checkNotNull(password);
this.uri = checkNotNull(uri);
this.name = checkNotNull(name);
// Created a map of the permformance counters we require to collect from
// the monitored entity
// TODO: Externalize the configuration of these counters to allow collection to be
// changed dynamically.
ImmutableMap.Builder<String,Metric> virtualMachineMetrics = ImmutableMap.builder();
virtualMachineMetrics.put("cpu.usage.AVERAGE",
new Metric("SYSTEM_CPU_USAGE_AVERAGE","CPU Average Utilization"));
virtualMachineMetrics.put("cpu.usage.MINIUM",
new Metric("SYSTEM_CPU_USAGE_MINIMUM","CPU Minimum Utilization"));
virtualMachineMetrics.put("cpu.idle.summation",
new Metric("SYSTEM_CPU_IDLE_TOTAL","CPU Total Idle"));
virtualMachineMetrics.put("mem.active.MAXIMUM",
new Metric("SYSTEM_MEMORY_ACTIVE_MAXIMUM","Memory Maximum Active"));
virtualMachineMetrics.put("mem.consumed.AVERAGE",
new Metric("SYSTEM_MEMORY_CONSUMED_AVERAGE","Memory Average Consumed"));
virtualMachineMetrics.put("mem.swapused.MAXIMUM",
new Metric("SYSTEM_MEMORY_SWAP_USED_MAXIMUM", "Memory Swap Used Maximum"));
virtualMachineMetrics.put("disk.read.AVERAGE",
new Metric("SYSTEM_DISK_READ_AVERAGE", "Disk Read Average"));
virtualMachineMetrics.put("disk.write.AVERAGE",
new Metric("SYSTEM_DISK_WRITE_AVERAGE", "Disk Write Average"));
vmMetrics = virtualMachineMetrics.build();
}
/**
* Returns the URI endpoint associated with this monitored entity
*
* @return {@link URI}
*/
public URI getUri() {
return uri;
}
/**
* Returns the user name associated with the monitored entity
* @return {@link String}
*/
public String getUsername() {
return username;
}
/**
* Returns the password used to authenticate to the monitored entity
*
* @return {@link String}
*/
public String getPassword() {
return password;
}
/**
* Returns the metrics associatd with this monitored entity
* @return {@link Map}
*/
public Map<String, Metric> getMetrics() {
return vmMetrics;
}
/**
* Returns the name of the metric client
*
* @return {@link String}
*/
public String getName() {
return name;
}
}
| Incorrect name of counters | src/main/java/com/boundary/metrics/vmware/poller/MonitoredEntity.java | Incorrect name of counters | <ide><path>rc/main/java/com/boundary/metrics/vmware/poller/MonitoredEntity.java
<ide> ImmutableMap.Builder<String,Metric> virtualMachineMetrics = ImmutableMap.builder();
<ide> virtualMachineMetrics.put("cpu.usage.AVERAGE",
<ide> new Metric("SYSTEM_CPU_USAGE_AVERAGE","CPU Average Utilization"));
<del> virtualMachineMetrics.put("cpu.usage.MINIUM",
<add> virtualMachineMetrics.put("cpu.usage.MINIMUM",
<ide> new Metric("SYSTEM_CPU_USAGE_MINIMUM","CPU Minimum Utilization"));
<del> virtualMachineMetrics.put("cpu.idle.summation",
<add> virtualMachineMetrics.put("cpu.idle.SUMMATION",
<ide> new Metric("SYSTEM_CPU_IDLE_TOTAL","CPU Total Idle"));
<ide> virtualMachineMetrics.put("mem.active.MAXIMUM",
<ide> new Metric("SYSTEM_MEMORY_ACTIVE_MAXIMUM","Memory Maximum Active")); |
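Editor's note: the commit above only corrects two hard-coded counter names ("cpu.usage.MINIUM" → "cpu.usage.MINIMUM", "cpu.idle.summation" → "cpu.idle.SUMMATION") — exactly the kind of typo the in-code TODO about externalizing the counter list would prevent. One possible shape for that, sketched with an invented counter=metricName|description properties format and no claim about the real project layout (it reuses Guava's ImmutableMap and the same Metric(name, description) constructor seen above):

    // Sketch for the TODO: read counter -> Metric mappings from a properties stream instead of hard-coding them.
    private static java.util.Map<String, Metric> loadMetrics(java.io.InputStream in) throws java.io.IOException {
        java.util.Properties props = new java.util.Properties();
        props.load(in);
        ImmutableMap.Builder<String, Metric> builder = ImmutableMap.builder();
        for (String counter : props.stringPropertyNames()) {
            String[] parts = props.getProperty(counter).split("\\|", 2);
            builder.put(counter, new Metric(parts[0], parts[1]));
        }
        return builder.build();
    }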
|
Java | mit | 6ebb543910bae9ecd48221f9bfae5d530c1db191 | 0 | vickychijwani/quill,vickychijwani/quill,vickychijwani/quill,vickychijwani/quill | package me.vickychijwani.spectre.network;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import com.crashlytics.android.Crashlytics;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.otto.Bus;
import com.squareup.otto.Subscribe;
import java.net.HttpURLConnection;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import hugo.weaving.DebugLog;
import io.realm.Realm;
import io.realm.RealmObject;
import io.realm.RealmResults;
import me.vickychijwani.spectre.SpectreApplication;
import me.vickychijwani.spectre.event.ApiErrorEvent;
import me.vickychijwani.spectre.event.BlogSettingsLoadedEvent;
import me.vickychijwani.spectre.event.BusProvider;
import me.vickychijwani.spectre.event.CreatePostEvent;
import me.vickychijwani.spectre.event.DataRefreshedEvent;
import me.vickychijwani.spectre.event.LoadBlogSettingsEvent;
import me.vickychijwani.spectre.event.LoadPostEvent;
import me.vickychijwani.spectre.event.LoadPostsEvent;
import me.vickychijwani.spectre.event.LoadUserEvent;
import me.vickychijwani.spectre.event.LoginDoneEvent;
import me.vickychijwani.spectre.event.LoginErrorEvent;
import me.vickychijwani.spectre.event.LoginStartEvent;
import me.vickychijwani.spectre.event.LogoutEvent;
import me.vickychijwani.spectre.event.PostCreatedEvent;
import me.vickychijwani.spectre.event.PostLoadedEvent;
import me.vickychijwani.spectre.event.PostReplacedEvent;
import me.vickychijwani.spectre.event.PostSavedEvent;
import me.vickychijwani.spectre.event.PostsLoadedEvent;
import me.vickychijwani.spectre.event.RefreshDataEvent;
import me.vickychijwani.spectre.event.SavePostEvent;
import me.vickychijwani.spectre.event.SyncPostsEvent;
import me.vickychijwani.spectre.event.UserLoadedEvent;
import me.vickychijwani.spectre.model.AuthReqBody;
import me.vickychijwani.spectre.model.AuthToken;
import me.vickychijwani.spectre.model.ETag;
import me.vickychijwani.spectre.model.PendingAction;
import me.vickychijwani.spectre.model.Post;
import me.vickychijwani.spectre.model.PostList;
import me.vickychijwani.spectre.model.PostStubList;
import me.vickychijwani.spectre.model.RefreshReqBody;
import me.vickychijwani.spectre.model.Setting;
import me.vickychijwani.spectre.model.SettingsList;
import me.vickychijwani.spectre.model.Tag;
import me.vickychijwani.spectre.model.User;
import me.vickychijwani.spectre.model.UserList;
import me.vickychijwani.spectre.pref.AppState;
import me.vickychijwani.spectre.pref.UserPrefs;
import me.vickychijwani.spectre.util.AppUtils;
import me.vickychijwani.spectre.util.DateTimeUtils;
import me.vickychijwani.spectre.util.PostUtils;
import retrofit.Callback;
import retrofit.RequestInterceptor;
import retrofit.RestAdapter;
import retrofit.RetrofitError;
import retrofit.client.OkClient;
import retrofit.client.Response;
import retrofit.converter.GsonConverter;
import rx.Observable;
public class NetworkService {
public static final String TAG = "NetworkService";
private Realm mRealm = null;
private GhostApiService mApi = null;
private AuthToken mAuthToken = null;
private OkHttpClient mOkHttpClient = null;
private GsonConverter mGsonConverter;
private RequestInterceptor mAuthInterceptor;
private boolean mbAuthRequestOnGoing = false;
private ArrayDeque<Object> mApiEventQueue = new ArrayDeque<>();
private ArrayDeque<Object> mRefreshEventsQueue = new ArrayDeque<>();
private ArrayDeque<Object> mPostUploadQueue = new ArrayDeque<>();
public NetworkService() {
Crashlytics.log(Log.DEBUG, TAG, "Initializing NetworkService...");
Gson gson = new GsonBuilder()
.setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.setExclusionStrategies(new RealmExclusionStrategy())
.create();
mGsonConverter = new GsonConverter(gson);
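        // Attach the stored access token to every outgoing request, but only while the token is valid and unexpired.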
mAuthInterceptor = (request) -> {
if (mAuthToken != null && mAuthToken.isValid() && ! hasAccessTokenExpired()) {
request.addHeader("Authorization", mAuthToken.getTokenType() + " " +
mAuthToken.getAccessToken());
}
};
}
public void start(Context context, OkHttpClient okHttpClient) {
mOkHttpClient = okHttpClient;
getBus().register(this);
mRealm = Realm.getInstance(context);
if (AppState.getInstance(context).getBoolean(AppState.Key.LOGGED_IN)) {
mAuthToken = mRealm.allObjects(AuthToken.class).first();
String blogUrl = UserPrefs.getInstance(context).getString(UserPrefs.Key.BLOG_URL);
mApi = buildApiService(blogUrl);
}
}
// I don't know how to call this from the Application class!
@SuppressWarnings("unused")
public void stop() {
getBus().unregister(this);
mRealm.close();
}
@Subscribe
public void onLoginStartEvent(final LoginStartEvent event) {
if (mbAuthRequestOnGoing) return;
AuthReqBody credentials = new AuthReqBody(event.username, event.password);
mApi = buildApiService(event.blogUrl);
mbAuthRequestOnGoing = true;
mApi.getAuthToken(credentials, new Callback<AuthToken>() {
@Override
public void success(AuthToken authToken, Response response) {
onNewAuthToken(authToken);
getBus().post(new LoginDoneEvent(event.blogUrl, event.username, event.password));
}
@Override
public void failure(RetrofitError error) {
mbAuthRequestOnGoing = false;
getBus().post(new LoginErrorEvent(error));
}
});
}
@Subscribe
public void onRefreshDataEvent(RefreshDataEvent event) {
// do nothing if a refresh is already in progress
if (! mRefreshEventsQueue.isEmpty()) {
refreshDone(null);
return;
}
Bus bus = getBus();
mRefreshEventsQueue.addAll(Arrays.asList(
new LoadUserEvent(true),
new LoadBlogSettingsEvent(true),
new SyncPostsEvent(true)
));
Observable.from(mRefreshEventsQueue)
.forEach(bus::post);
}
@Subscribe
public void onLoadUserEvent(final LoadUserEvent event) {
if (! event.forceNetworkCall) {
RealmResults<User> users = mRealm.allObjects(User.class);
if (users.size() > 0) {
getBus().post(new UserLoadedEvent(users.first()));
refreshDone(event);
return;
}
// else no users found in db, force a network call!
}
if (! validateAccessToken(event)) return;
mApi.getCurrentUser(new Callback<UserList>() {
@Override
public void success(UserList userList, Response response) {
createOrUpdateModel(userList.users);
getBus().post(new UserLoadedEvent(userList.users.get(0)));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
refreshDone(event);
}
});
}
@Subscribe
public void onLoadBlogSettingsEvent(final LoadBlogSettingsEvent event) {
if (! event.forceNetworkCall) {
RealmResults<Setting> settings = mRealm.allObjects(Setting.class);
if (settings.size() > 0) {
getBus().post(new BlogSettingsLoadedEvent(settings));
refreshDone(event);
return;
}
// no settings found in db, force a network call!
}
if (! validateAccessToken(event)) return;
mApi.getSettings(new Callback<SettingsList>() {
@Override
public void success(SettingsList settingsList, Response response) {
createOrUpdateModel(settingsList.settings);
getBus().post(new BlogSettingsLoadedEvent(settingsList.settings));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
refreshDone(event);
}
});
}
@Subscribe
public void onLoadPostsEvent(final LoadPostsEvent event) {
if (! event.forceNetworkCall) {
RealmResults<Post> posts = getPostsSorted();
// if there are no posts, there could be 2 cases:
// 1. there are actually no posts
// 2. we just haven't fetched any posts from the server yet (Realm returns an empty list in this case too)
if (posts.size() > 0) {
getBus().post(new PostsLoadedEvent(posts));
refreshDone(event);
return;
}
}
if (! validateAccessToken(event)) return;
RealmResults<ETag> etags = mRealm.allObjects(ETag.class);
String etagStr = "";
if (etags.size() > 0) {
etagStr = etags.first().getTag();
if (etagStr == null) etagStr = "";
}
mApi.getPosts(etagStr, new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
// update the stored etag
Observable.from(response.getHeaders())
.takeFirst(h -> "ETag".equals(h.getName()))
.forEach(h -> createOrUpdateModel(new ETag(h.getValue())));
// delete posts that are no longer present on the server
// this assumes that postList.posts is a list of ALL posts on the server
RealmResults<Post> cachedPosts = mRealm
.where(Post.class)
.equalTo("status", Post.DRAFT)
.or().equalTo("status", Post.PUBLISHED)
.findAll();
// FIXME time complexity is quadratic in the number of posts!
Observable.from(cachedPosts)
.filter(cached -> !postList.contains(cached.getUuid()))
.toList()
.forEach(NetworkService.this::deleteModels);
// make sure drafts have a publishedAt of FAR_FUTURE so they're sorted to the top
Observable.from(postList.posts)
.filter(post -> post.getPublishedAt() == null)
.forEach(post -> post.setPublishedAt(DateTimeUtils.FAR_FUTURE));
// now create / update received posts
createOrUpdateModel(postList.posts);
getBus().post(new PostsLoadedEvent(getPostsSorted()));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
getBus().post(new PostsLoadedEvent(getPostsSorted())); // broadcast stale data anyway
refreshDone(event);
}
});
}
@Subscribe
public void onLoadPostEvent(LoadPostEvent event) {
Post post = mRealm.where(Post.class).equalTo("uuid", event.uuid).findFirst();
if (post != null) {
Post postCopy = new Post(post); // copy isn't tied to db, so it can be mutated freely
getBus().post(new PostLoadedEvent(postCopy));
} else {
Log.wtf(TAG, "No post with uuid = " + event.uuid + " found!");
}
}
@Subscribe
public void onCreatePostEvent(final CreatePostEvent event) {
Post newPost = new Post();
PostUtils.addPendingAction(newPost, PendingAction.CREATE);
newPost.setUuid(getTempUniqueId(Post.class));
createOrUpdateModel(newPost); // save the local post to db
getBus().post(new PostCreatedEvent(newPost));
getBus().post(new SyncPostsEvent(false));
}
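    // Uploads locally-created and locally-edited posts, deletes the local copies of new posts
    // that were uploaded successfully, and (when requested) follows up with a forced network
    // load of all posts.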
@Subscribe
public void onSyncPostsEvent(final SyncPostsEvent event) {
final RealmResults<Post> localNewPosts = mRealm.where(Post.class)
.equalTo("pendingActions.type", PendingAction.CREATE)
.findAllSorted("uuid", false);
final RealmResults<Post> localEditedPosts = mRealm.where(Post.class)
.equalTo("pendingActions.type", PendingAction.EDIT)
.findAll();
// nothing to upload
if (localNewPosts.isEmpty() && localEditedPosts.isEmpty() && event.refreshPosts) {
LoadPostsEvent loadPostsEvent = new LoadPostsEvent(true);
mRefreshEventsQueue.add(loadPostsEvent);
getBus().post(loadPostsEvent);
refreshDone(event);
return;
}
// keep track of new posts uploaded successfully, so the local copies can be deleted
List<Post> localNewPostsUploaded = new ArrayList<>();
final Runnable syncFinishedCB = () -> {
// delete local copies of only those new posts that were successfully uploaded
deleteModels(localNewPostsUploaded);
            // if refreshPosts is true, first reload from the db and only then from the network;
            // the local copies deleted above may still be on screen, so the UI must be
            // refreshed first to avoid a crash
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (event.refreshPosts) {
LoadPostsEvent loadPostsEvent = new LoadPostsEvent(true);
mRefreshEventsQueue.add(loadPostsEvent);
getBus().post(loadPostsEvent);
}
refreshDone(event);
};
mPostUploadQueue.addAll(localNewPosts);
mPostUploadQueue.addAll(localEditedPosts);
// the loop variable is *local* to the loop block, so it can be captured in a closure easily
// this is unlike JavaScript, in which the same loop variable is mutated
for (final Post localPost : localNewPosts) {
if (! validateAccessToken(event)) return;
mApi.createPost(PostStubList.from(localPost), new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
createOrUpdateModel(postList.posts, () -> {
localPost.getPendingActions().clear();
});
mPostUploadQueue.removeFirstOccurrence(localPost);
localNewPostsUploaded.add(localPost);
// FIXME this is a new post! how do subscribers know which post changed?
getBus().post(new PostReplacedEvent(postList.posts.get(0)));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
@Override
public void failure(RetrofitError error) {
mPostUploadQueue.removeFirstOccurrence(localPost);
getBus().post(new ApiErrorEvent(error));
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
});
}
// the loop variable is *local* to the loop block, so it can be captured in a closure easily
// this is unlike JavaScript, in which the same loop variable is mutated
for (final Post localPost : localEditedPosts) {
if (! validateAccessToken(event)) return;
PostStubList postStubList = PostStubList.from(localPost);
mApi.updatePost(localPost.getId(), postStubList, new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
createOrUpdateModel(postList.posts, () -> {
localPost.getPendingActions().clear();
});
mPostUploadQueue.removeFirstOccurrence(localPost);
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
@Override
public void failure(RetrofitError error) {
mPostUploadQueue.removeFirstOccurrence(localPost);
getBus().post(new ApiErrorEvent(error));
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
});
}
}
@Subscribe
public void onSavePostEvent(SavePostEvent event) {
PostUtils.addPendingAction(event.post, PendingAction.EDIT);
for (Tag tag : event.post.getTags()) {
if (tag.getUuid() == null) {
tag.setUuid(getTempUniqueId(Tag.class));
createOrUpdateModel(tag);
}
}
createOrUpdateModel(event.post); // save the local post to db
getBus().post(new PostSavedEvent());
getBus().post(new SyncPostsEvent(false));
}
@Subscribe
public void onLogoutEvent(LogoutEvent event) {
// clear all persisted blog data to avoid primary key conflicts
mRealm.close();
Realm.deleteRealmFile(event.context);
mRealm = Realm.getInstance(event.context);
AppState.getInstance(SpectreApplication.getInstance())
.setBoolean(AppState.Key.LOGGED_IN, false);
}
// private methods
private boolean validateAccessToken(@NonNull Object event) {
boolean valid = ! hasAccessTokenExpired();
if (! valid) {
refreshAccessToken(event);
}
return valid;
}
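    // Defers the triggering event and requests a new access token using the refresh token;
    // deferred events are replayed by flushApiEventQueue() once onNewAuthToken() stores the new
    // token. Falls back to a fresh login if the refresh token itself has expired.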
@DebugLog
private void refreshAccessToken(@Nullable final Object eventToDefer) {
mApiEventQueue.addLast(eventToDefer);
if (mbAuthRequestOnGoing) return;
// don't waste bandwidth by trying to use an expired refresh token
if (hasRefreshTokenExpired()) {
postLoginStartEvent();
return;
}
final RefreshReqBody credentials = new RefreshReqBody(mAuthToken.getRefreshToken());
mbAuthRequestOnGoing = true;
mApi.refreshAuthToken(credentials, new Callback<AuthToken>() {
@Override
public void success(AuthToken authToken, Response response) {
// since this is a *refreshed* auth token, there is no refresh token in it, so add
// it manually
authToken.setRefreshToken(credentials.refreshToken);
onNewAuthToken(authToken);
}
@Override
public void failure(RetrofitError error) {
mbAuthRequestOnGoing = false;
// if the response is 401 Unauthorized, we can recover from it by logging in anew
// but this should never happen because we first check if the refresh token is valid
if (error.getResponse() != null &&
error.getResponse().getStatus() == HttpURLConnection.HTTP_UNAUTHORIZED) {
postLoginStartEvent();
Log.e(TAG, "Expired refresh token used! You're wasting bandwidth / battery!");
} else {
getBus().post(new LoginErrorEvent(error));
}
}
});
}
private void flushApiEventQueue() {
Bus bus = getBus();
while (! mApiEventQueue.isEmpty()) {
bus.post(mApiEventQueue.remove());
}
}
private void refreshDone(@Nullable Object sourceEvent) {
mRefreshEventsQueue.removeFirstOccurrence(sourceEvent);
if (mRefreshEventsQueue.isEmpty()) {
getBus().post(new DataRefreshedEvent());
}
}
@DebugLog
private void postLoginStartEvent() {
UserPrefs prefs = UserPrefs.getInstance(SpectreApplication.getInstance());
String blogUrl = prefs.getString(UserPrefs.Key.BLOG_URL);
String username = prefs.getString(UserPrefs.Key.USERNAME);
String password = prefs.getString(UserPrefs.Key.PASSWORD);
getBus().post(new LoginStartEvent(blogUrl, username, password));
}
@DebugLog
private void onNewAuthToken(AuthToken authToken) {
Log.d(TAG, "Got new access token = " + authToken.getAccessToken());
mbAuthRequestOnGoing = false;
authToken.setCreatedAt(DateTimeUtils.getEpochSeconds());
mAuthToken = createOrUpdateModel(authToken);
AppState.getInstance(SpectreApplication.getInstance())
.setBoolean(AppState.Key.LOGGED_IN, true);
flushApiEventQueue();
}
private boolean hasAccessTokenExpired() {
// consider the token as "expired" 60 seconds earlier, because the createdAt timestamp can
// be off by several seconds
return DateTimeUtils.getEpochSeconds() > mAuthToken.getCreatedAt() +
mAuthToken.getExpiresIn() - 60;
}
private boolean hasRefreshTokenExpired() {
// consider the token as "expired" 60 seconds earlier, because the createdAt timestamp can
// be off by several seconds
return DateTimeUtils.getEpochSeconds() > mAuthToken.getCreatedAt() + 86400 - 60;
}
private GhostApiService buildApiService(@NonNull String blogUrl) {
RestAdapter restAdapter = new RestAdapter.Builder()
.setEndpoint(AppUtils.pathJoin(blogUrl, "ghost/api/v0.1"))
.setClient(new OkClient(mOkHttpClient))
.setConverter(mGsonConverter)
.setRequestInterceptor(mAuthInterceptor)
.setLogLevel(RestAdapter.LogLevel.HEADERS)
.build();
return restAdapter.create(GhostApiService.class);
}
private RealmResults<Post> getPostsSorted() {
RealmResults<Post> posts = mRealm.allObjects(Post.class);
posts.sort(new String[]{ "publishedAt", "updatedAt" }, new boolean[]{ false, false });
return posts;
}
/**
* Generates a temporary primary key until the actual id is generated by the server. <b>Be
     * careful when calling this in a loop: if you don't save the object before calling it again,
* you'll get the same id twice!</b>
*/
@NonNull
public <T extends RealmObject> String getTempUniqueId(Class<T> clazz) {
int tempId = Integer.MAX_VALUE;
while (mRealm.where(clazz).equalTo("uuid", String.valueOf(tempId)).findAll().size() > 0) {
--tempId;
}
return String.valueOf(tempId);
}
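    // Realm write helpers: each call wraps copyToRealmOrUpdate (plus an optional extra mutation)
    // in a single write transaction.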
private <T extends RealmObject> T createOrUpdateModel(T object) {
return createOrUpdateModel(object, null);
}
private <T extends RealmObject> T createOrUpdateModel(T object, @Nullable Runnable transaction) {
mRealm.beginTransaction();
T realmObject = mRealm.copyToRealmOrUpdate(object);
if (transaction != null) {
transaction.run();
}
mRealm.commitTransaction();
return realmObject;
}
private <T extends RealmObject> List<T> createOrUpdateModel(Iterable<T> objects) {
return createOrUpdateModel(objects, null);
}
private <T extends RealmObject> List<T> createOrUpdateModel(Iterable<T> objects,
@Nullable Runnable transaction) {
mRealm.beginTransaction();
List<T> realmObjects = mRealm.copyToRealmOrUpdate(objects);
if (transaction != null) {
transaction.run();
}
mRealm.commitTransaction();
return realmObjects;
}
private <T extends RealmObject> void deleteModels(List<T> realmObjects) {
mRealm.beginTransaction();
Observable.from(realmObjects).forEach(T::removeFromRealm);
mRealm.commitTransaction();
}
private Bus getBus() {
return BusProvider.getBus();
}
}
| app/src/main/java/me/vickychijwani/spectre/network/NetworkService.java | package me.vickychijwani.spectre.network;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import com.crashlytics.android.Crashlytics;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.otto.Bus;
import com.squareup.otto.Subscribe;
import java.net.HttpURLConnection;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import hugo.weaving.DebugLog;
import io.realm.Realm;
import io.realm.RealmObject;
import io.realm.RealmResults;
import me.vickychijwani.spectre.SpectreApplication;
import me.vickychijwani.spectre.event.ApiErrorEvent;
import me.vickychijwani.spectre.event.BlogSettingsLoadedEvent;
import me.vickychijwani.spectre.event.BusProvider;
import me.vickychijwani.spectre.event.CreatePostEvent;
import me.vickychijwani.spectre.event.DataRefreshedEvent;
import me.vickychijwani.spectre.event.LoadBlogSettingsEvent;
import me.vickychijwani.spectre.event.LoadPostEvent;
import me.vickychijwani.spectre.event.LoadPostsEvent;
import me.vickychijwani.spectre.event.LoadUserEvent;
import me.vickychijwani.spectre.event.LoginDoneEvent;
import me.vickychijwani.spectre.event.LoginErrorEvent;
import me.vickychijwani.spectre.event.LoginStartEvent;
import me.vickychijwani.spectre.event.LogoutEvent;
import me.vickychijwani.spectre.event.PostCreatedEvent;
import me.vickychijwani.spectre.event.PostLoadedEvent;
import me.vickychijwani.spectre.event.PostReplacedEvent;
import me.vickychijwani.spectre.event.PostSavedEvent;
import me.vickychijwani.spectre.event.PostsLoadedEvent;
import me.vickychijwani.spectre.event.RefreshDataEvent;
import me.vickychijwani.spectre.event.SavePostEvent;
import me.vickychijwani.spectre.event.SyncPostsEvent;
import me.vickychijwani.spectre.event.UserLoadedEvent;
import me.vickychijwani.spectre.model.AuthReqBody;
import me.vickychijwani.spectre.model.AuthToken;
import me.vickychijwani.spectre.model.ETag;
import me.vickychijwani.spectre.model.PendingAction;
import me.vickychijwani.spectre.model.Post;
import me.vickychijwani.spectre.model.PostList;
import me.vickychijwani.spectre.model.PostStubList;
import me.vickychijwani.spectre.model.RefreshReqBody;
import me.vickychijwani.spectre.model.Setting;
import me.vickychijwani.spectre.model.SettingsList;
import me.vickychijwani.spectre.model.Tag;
import me.vickychijwani.spectre.model.User;
import me.vickychijwani.spectre.model.UserList;
import me.vickychijwani.spectre.pref.AppState;
import me.vickychijwani.spectre.pref.UserPrefs;
import me.vickychijwani.spectre.util.AppUtils;
import me.vickychijwani.spectre.util.DateTimeUtils;
import me.vickychijwani.spectre.util.PostUtils;
import retrofit.Callback;
import retrofit.RequestInterceptor;
import retrofit.RestAdapter;
import retrofit.RetrofitError;
import retrofit.client.OkClient;
import retrofit.client.Response;
import retrofit.converter.GsonConverter;
import rx.Observable;
public class NetworkService {
public static final String TAG = "NetworkService";
private Realm mRealm = null;
private GhostApiService mApi = null;
private AuthToken mAuthToken = null;
private OkHttpClient mOkHttpClient = null;
private GsonConverter mGsonConverter;
private RequestInterceptor mAuthInterceptor;
private boolean mbAuthRequestOnGoing = false;
private ArrayDeque<Object> mApiEventQueue = new ArrayDeque<>();
private ArrayDeque<Object> mRefreshEventsQueue = new ArrayDeque<>();
private ArrayDeque<Object> mPostUploadQueue = new ArrayDeque<>();
public NetworkService() {
Crashlytics.log(Log.DEBUG, TAG, "Initializing NetworkService...");
Gson gson = new GsonBuilder()
.setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
.setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
.setExclusionStrategies(new RealmExclusionStrategy())
.create();
mGsonConverter = new GsonConverter(gson);
mAuthInterceptor = (request) -> {
if (mAuthToken != null && mAuthToken.isValid() && ! hasAccessTokenExpired()) {
request.addHeader("Authorization", mAuthToken.getTokenType() + " " +
mAuthToken.getAccessToken());
}
};
}
public void start(Context context, OkHttpClient okHttpClient) {
mOkHttpClient = okHttpClient;
getBus().register(this);
mRealm = Realm.getInstance(context);
if (AppState.getInstance(context).getBoolean(AppState.Key.LOGGED_IN)) {
mAuthToken = mRealm.allObjects(AuthToken.class).first();
String blogUrl = UserPrefs.getInstance(context).getString(UserPrefs.Key.BLOG_URL);
mApi = buildApiService(blogUrl);
}
}
// I don't know how to call this from the Application class!
@SuppressWarnings("unused")
public void stop() {
getBus().unregister(this);
mRealm.close();
}
@Subscribe
public void onLoginStartEvent(final LoginStartEvent event) {
if (mbAuthRequestOnGoing) return;
AuthReqBody credentials = new AuthReqBody(event.username, event.password);
mApi = buildApiService(event.blogUrl);
mbAuthRequestOnGoing = true;
mApi.getAuthToken(credentials, new Callback<AuthToken>() {
@Override
public void success(AuthToken authToken, Response response) {
onNewAuthToken(authToken);
getBus().post(new LoginDoneEvent(event.blogUrl, event.username, event.password));
}
@Override
public void failure(RetrofitError error) {
mbAuthRequestOnGoing = false;
getBus().post(new LoginErrorEvent(error));
}
});
}
@Subscribe
public void onRefreshDataEvent(RefreshDataEvent event) {
// do nothing if a refresh is already in progress
if (! mRefreshEventsQueue.isEmpty()) {
refreshDone(null);
return;
}
Bus bus = getBus();
mRefreshEventsQueue.addAll(Arrays.asList(
new LoadUserEvent(true),
new LoadBlogSettingsEvent(true),
new SyncPostsEvent(true)
));
Observable.from(mRefreshEventsQueue)
.forEach(bus::post);
}
@Subscribe
public void onLoadUserEvent(final LoadUserEvent event) {
if (! event.forceNetworkCall) {
RealmResults<User> users = mRealm.allObjects(User.class);
if (users.size() > 0) {
getBus().post(new UserLoadedEvent(users.first()));
refreshDone(event);
return;
}
// else no users found in db, force a network call!
}
if (! validateAccessToken(event)) return;
mApi.getCurrentUser(new Callback<UserList>() {
@Override
public void success(UserList userList, Response response) {
createOrUpdateModel(userList.users);
getBus().post(new UserLoadedEvent(userList.users.get(0)));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
refreshDone(event);
}
});
}
@Subscribe
public void onLoadBlogSettingsEvent(final LoadBlogSettingsEvent event) {
if (! event.forceNetworkCall) {
RealmResults<Setting> settings = mRealm.allObjects(Setting.class);
if (settings.size() > 0) {
getBus().post(new BlogSettingsLoadedEvent(settings));
refreshDone(event);
return;
}
// no settings found in db, force a network call!
}
if (! validateAccessToken(event)) return;
mApi.getSettings(new Callback<SettingsList>() {
@Override
public void success(SettingsList settingsList, Response response) {
createOrUpdateModel(settingsList.settings);
getBus().post(new BlogSettingsLoadedEvent(settingsList.settings));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
refreshDone(event);
}
});
}
@Subscribe
public void onLoadPostsEvent(final LoadPostsEvent event) {
if (! event.forceNetworkCall) {
RealmResults<Post> posts = getPostsSorted();
// if there are no posts, there could be 2 cases:
// 1. there are actually no posts
// 2. we just haven't fetched any posts from the server yet (Realm returns an empty list in this case too)
if (posts.size() > 0) {
getBus().post(new PostsLoadedEvent(posts));
refreshDone(event);
return;
}
}
if (! validateAccessToken(event)) return;
RealmResults<ETag> etags = mRealm.allObjects(ETag.class);
String etagStr = "";
if (etags.size() > 0) {
etagStr = etags.first().getTag();
if (etagStr == null) etagStr = "";
}
mApi.getPosts(etagStr, new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
// update the stored etag
Observable.from(response.getHeaders())
.takeFirst(h -> "ETag".equals(h.getName()))
.forEach(h -> createOrUpdateModel(new ETag(h.getValue())));
// delete posts that are no longer present on the server
// this assumes that postList.posts is a list of ALL posts on the server
RealmResults<Post> cachedPosts = mRealm
.where(Post.class)
.equalTo("status", Post.DRAFT)
.or().equalTo("status", Post.PUBLISHED)
.findAll();
// FIXME time complexity is quadratic in the number of posts!
Observable.from(cachedPosts)
.filter(cached -> !postList.contains(cached.getUuid()))
.toList()
.forEach(NetworkService.this::deleteModels);
// make sure drafts have a publishedAt of FAR_FUTURE so they're sorted to the top
Observable.from(postList.posts)
.filter(post -> post.getPublishedAt() == null)
.forEach(post -> post.setPublishedAt(DateTimeUtils.FAR_FUTURE));
// now create / update received posts
createOrUpdateModel(postList.posts);
getBus().post(new PostsLoadedEvent(getPostsSorted()));
refreshDone(event);
}
@Override
public void failure(RetrofitError error) {
getBus().post(new ApiErrorEvent(error));
refreshDone(event);
}
});
}
@Subscribe
public void onLoadPostEvent(LoadPostEvent event) {
Post post = mRealm.where(Post.class).equalTo("uuid", event.uuid).findFirst();
if (post != null) {
Post postCopy = new Post(post); // copy isn't tied to db, so it can be mutated freely
getBus().post(new PostLoadedEvent(postCopy));
} else {
Log.wtf(TAG, "No post with uuid = " + event.uuid + " found!");
}
}
@Subscribe
public void onCreatePostEvent(final CreatePostEvent event) {
Post newPost = new Post();
PostUtils.addPendingAction(newPost, PendingAction.CREATE);
newPost.setUuid(getTempUniqueId(Post.class));
createOrUpdateModel(newPost); // save the local post to db
getBus().post(new PostCreatedEvent(newPost));
getBus().post(new SyncPostsEvent(false));
}
@Subscribe
public void onSyncPostsEvent(final SyncPostsEvent event) {
final RealmResults<Post> localNewPosts = mRealm.where(Post.class)
.equalTo("pendingActions.type", PendingAction.CREATE)
.findAllSorted("uuid", false);
final RealmResults<Post> localEditedPosts = mRealm.where(Post.class)
.equalTo("pendingActions.type", PendingAction.EDIT)
.findAll();
// nothing to upload
if (localNewPosts.isEmpty() && localEditedPosts.isEmpty() && event.refreshPosts) {
LoadPostsEvent loadPostsEvent = new LoadPostsEvent(true);
mRefreshEventsQueue.add(loadPostsEvent);
getBus().post(loadPostsEvent);
refreshDone(event);
return;
}
// keep track of new posts uploaded successfully, so the local copies can be deleted
List<Post> localNewPostsUploaded = new ArrayList<>();
final Runnable syncFinishedCB = () -> {
// delete local copies of only those new posts that were successfully uploaded
deleteModels(localNewPostsUploaded);
// if refreshPosts is true, first load from the db, AND only then from the network,
// to avoid a crash because local posts have been deleted above but are still being
// displayed, so we need to refresh the UI first
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (event.refreshPosts) {
LoadPostsEvent loadPostsEvent = new LoadPostsEvent(true);
mRefreshEventsQueue.add(loadPostsEvent);
getBus().post(loadPostsEvent);
}
refreshDone(event);
};
mPostUploadQueue.addAll(localNewPosts);
mPostUploadQueue.addAll(localEditedPosts);
// the loop variable is *local* to the loop block, so it can be captured in a closure easily
// this is unlike JavaScript, in which the same loop variable is mutated
for (final Post localPost : localNewPosts) {
if (! validateAccessToken(event)) return;
mApi.createPost(PostStubList.from(localPost), new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
createOrUpdateModel(postList.posts, () -> {
localPost.getPendingActions().clear();
});
mPostUploadQueue.removeFirstOccurrence(localPost);
localNewPostsUploaded.add(localPost);
// FIXME this is a new post! how do subscribers know which post changed?
getBus().post(new PostReplacedEvent(postList.posts.get(0)));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
@Override
public void failure(RetrofitError error) {
mPostUploadQueue.removeFirstOccurrence(localPost);
getBus().post(new ApiErrorEvent(error));
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
});
}
// the loop variable is *local* to the loop block, so it can be captured in a closure easily
// this is unlike JavaScript, in which the same loop variable is mutated
for (final Post localPost : localEditedPosts) {
if (! validateAccessToken(event)) return;
PostStubList postStubList = PostStubList.from(localPost);
mApi.updatePost(localPost.getId(), postStubList, new Callback<PostList>() {
@Override
public void success(PostList postList, Response response) {
createOrUpdateModel(postList.posts, () -> {
localPost.getPendingActions().clear();
});
mPostUploadQueue.removeFirstOccurrence(localPost);
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
@Override
public void failure(RetrofitError error) {
mPostUploadQueue.removeFirstOccurrence(localPost);
getBus().post(new ApiErrorEvent(error));
getBus().post(new PostsLoadedEvent(getPostsSorted()));
if (mPostUploadQueue.isEmpty()) syncFinishedCB.run();
}
});
}
}
@Subscribe
public void onSavePostEvent(SavePostEvent event) {
PostUtils.addPendingAction(event.post, PendingAction.EDIT);
for (Tag tag : event.post.getTags()) {
if (tag.getUuid() == null) {
tag.setUuid(getTempUniqueId(Tag.class));
createOrUpdateModel(tag);
}
}
createOrUpdateModel(event.post); // save the local post to db
getBus().post(new PostSavedEvent());
getBus().post(new SyncPostsEvent(false));
}
@Subscribe
public void onLogoutEvent(LogoutEvent event) {
// clear all persisted blog data to avoid primary key conflicts
mRealm.close();
Realm.deleteRealmFile(event.context);
mRealm = Realm.getInstance(event.context);
AppState.getInstance(SpectreApplication.getInstance())
.setBoolean(AppState.Key.LOGGED_IN, false);
}
// private methods
private boolean validateAccessToken(@NonNull Object event) {
boolean valid = ! hasAccessTokenExpired();
if (! valid) {
refreshAccessToken(event);
}
return valid;
}
@DebugLog
private void refreshAccessToken(@Nullable final Object eventToDefer) {
mApiEventQueue.addLast(eventToDefer);
if (mbAuthRequestOnGoing) return;
// don't waste bandwidth by trying to use an expired refresh token
if (hasRefreshTokenExpired()) {
postLoginStartEvent();
return;
}
final RefreshReqBody credentials = new RefreshReqBody(mAuthToken.getRefreshToken());
mbAuthRequestOnGoing = true;
mApi.refreshAuthToken(credentials, new Callback<AuthToken>() {
@Override
public void success(AuthToken authToken, Response response) {
// since this is a *refreshed* auth token, there is no refresh token in it, so add
// it manually
authToken.setRefreshToken(credentials.refreshToken);
onNewAuthToken(authToken);
}
@Override
public void failure(RetrofitError error) {
mbAuthRequestOnGoing = false;
// if the response is 401 Unauthorized, we can recover from it by logging in anew
// but this should never happen because we first check if the refresh token is valid
if (error.getResponse() != null &&
error.getResponse().getStatus() == HttpURLConnection.HTTP_UNAUTHORIZED) {
postLoginStartEvent();
Log.e(TAG, "Expired refresh token used! You're wasting bandwidth / battery!");
} else {
getBus().post(new LoginErrorEvent(error));
}
}
});
}
private void flushApiEventQueue() {
Bus bus = getBus();
while (! mApiEventQueue.isEmpty()) {
bus.post(mApiEventQueue.remove());
}
}
private void refreshDone(@Nullable Object sourceEvent) {
mRefreshEventsQueue.removeFirstOccurrence(sourceEvent);
if (mRefreshEventsQueue.isEmpty()) {
getBus().post(new DataRefreshedEvent());
}
}
@DebugLog
private void postLoginStartEvent() {
UserPrefs prefs = UserPrefs.getInstance(SpectreApplication.getInstance());
String blogUrl = prefs.getString(UserPrefs.Key.BLOG_URL);
String username = prefs.getString(UserPrefs.Key.USERNAME);
String password = prefs.getString(UserPrefs.Key.PASSWORD);
getBus().post(new LoginStartEvent(blogUrl, username, password));
}
@DebugLog
private void onNewAuthToken(AuthToken authToken) {
Log.d(TAG, "Got new access token = " + authToken.getAccessToken());
mbAuthRequestOnGoing = false;
authToken.setCreatedAt(DateTimeUtils.getEpochSeconds());
mAuthToken = createOrUpdateModel(authToken);
AppState.getInstance(SpectreApplication.getInstance())
.setBoolean(AppState.Key.LOGGED_IN, true);
flushApiEventQueue();
}
private boolean hasAccessTokenExpired() {
// consider the token as "expired" 60 seconds earlier, because the createdAt timestamp can
// be off by several seconds
return DateTimeUtils.getEpochSeconds() > mAuthToken.getCreatedAt() +
mAuthToken.getExpiresIn() - 60;
}
private boolean hasRefreshTokenExpired() {
// consider the token as "expired" 60 seconds earlier, because the createdAt timestamp can
// be off by several seconds
return DateTimeUtils.getEpochSeconds() > mAuthToken.getCreatedAt() + 86400 - 60;
}
private GhostApiService buildApiService(@NonNull String blogUrl) {
RestAdapter restAdapter = new RestAdapter.Builder()
.setEndpoint(AppUtils.pathJoin(blogUrl, "ghost/api/v0.1"))
.setClient(new OkClient(mOkHttpClient))
.setConverter(mGsonConverter)
.setRequestInterceptor(mAuthInterceptor)
.setLogLevel(RestAdapter.LogLevel.HEADERS)
.build();
return restAdapter.create(GhostApiService.class);
}
private RealmResults<Post> getPostsSorted() {
RealmResults<Post> posts = mRealm.allObjects(Post.class);
posts.sort(new String[]{ "publishedAt", "updatedAt" }, new boolean[]{ false, false });
return posts;
}
/**
* Generates a temporary primary key until the actual id is generated by the server. <b>Be
* careful when calling this in a loop, if you don't save the object before calling it again,
* you'll get the same id twice!</b>
*/
@NonNull
public <T extends RealmObject> String getTempUniqueId(Class<T> clazz) {
int tempId = Integer.MAX_VALUE;
while (mRealm.where(clazz).equalTo("uuid", String.valueOf(tempId)).findAll().size() > 0) {
--tempId;
}
return String.valueOf(tempId);
}
private <T extends RealmObject> T createOrUpdateModel(T object) {
return createOrUpdateModel(object, null);
}
private <T extends RealmObject> T createOrUpdateModel(T object, @Nullable Runnable transaction) {
mRealm.beginTransaction();
T realmObject = mRealm.copyToRealmOrUpdate(object);
if (transaction != null) {
transaction.run();
}
mRealm.commitTransaction();
return realmObject;
}
private <T extends RealmObject> List<T> createOrUpdateModel(Iterable<T> objects) {
return createOrUpdateModel(objects, null);
}
private <T extends RealmObject> List<T> createOrUpdateModel(Iterable<T> objects,
@Nullable Runnable transaction) {
mRealm.beginTransaction();
List<T> realmObjects = mRealm.copyToRealmOrUpdate(objects);
if (transaction != null) {
transaction.run();
}
mRealm.commitTransaction();
return realmObjects;
}
private <T extends RealmObject> void deleteModels(List<T> realmObjects) {
mRealm.beginTransaction();
Observable.from(realmObjects).forEach(T::removeFromRealm);
mRealm.commitTransaction();
}
private Bus getBus() {
return BusProvider.getBus();
}
}
| Fix bug in 66b4dd4 (blank screen on startup if logged in).
| app/src/main/java/me/vickychijwani/spectre/network/NetworkService.java | Fix bug in 66b4dd4 (blank screen on startup if logged in). | <ide><path>pp/src/main/java/me/vickychijwani/spectre/network/NetworkService.java
<ide> @Override
<ide> public void failure(RetrofitError error) {
<ide> getBus().post(new ApiErrorEvent(error));
<add> getBus().post(new PostsLoadedEvent(getPostsSorted())); // broadcast stale data anyway
<ide> refreshDone(event);
<ide> }
<ide> }); |
|
Java | apache-2.0 | e9785684128c8c162233d171d176c496dc598856 | 0 | mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron | /*
* Copyright 2014-2021 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.client.ArchiveException;
import io.aeron.archive.client.RecordingSignalPoller;
import io.aeron.archive.codecs.*;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.client.AeronCluster;
import io.aeron.cluster.client.ClusterException;
import io.aeron.cluster.codecs.MessageHeaderDecoder;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.exceptions.AeronException;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.security.Authenticator;
import io.aeron.status.ReadableCounter;
import org.agrona.*;
import org.agrona.collections.*;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static io.aeron.Aeron.NULL_VALUE;
import static io.aeron.CommonContext.*;
import static io.aeron.archive.client.AeronArchive.NULL_LENGTH;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.archive.client.ReplayMerge.LIVE_ADD_MAX_WINDOW;
import static io.aeron.archive.codecs.SourceLocation.LOCAL;
import static io.aeron.cluster.ClusterMember.quorumPosition;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.*;
import static io.aeron.cluster.client.AeronCluster.SESSION_HEADER_LENGTH;
import static io.aeron.cluster.service.ClusteredServiceContainer.Configuration.MARK_FILE_UPDATE_INTERVAL_NS;
import static io.aeron.exceptions.AeronException.Category.WARN;
import static java.lang.Math.min;
import static org.agrona.BitUtil.findNextPositivePowerOfTwo;
final class ConsensusModuleAgent implements Agent
{
static final long SLOW_TICK_INTERVAL_NS = TimeUnit.MILLISECONDS.toNanos(10);
private static final int SERVICE_MESSAGE_LIMIT = 20;
private final long sessionTimeoutNs;
private final long leaderHeartbeatIntervalNs;
private final long leaderHeartbeatTimeoutNs;
private long unavailableCounterHandlerRegistrationId;
private long nextSessionId = 1;
private long nextServiceSessionId = Long.MIN_VALUE + 1;
private long logServiceSessionId = Long.MIN_VALUE;
private long leadershipTermId = NULL_VALUE;
private long replayLeadershipTermId = NULL_VALUE;
private long expectedAckPosition = 0;
private long serviceAckId = 0;
private long terminationPosition = NULL_POSITION;
private long notifiedCommitPosition = 0;
private long lastAppendPosition = 0;
private long timeOfLastLogUpdateNs = 0;
private long timeOfLastAppendPositionNs = 0;
private long slowTickDeadlineNs = 0;
private long markFileUpdateDeadlineNs = 0;
private int pendingServiceMessageHeadOffset = 0;
private int uncommittedServiceMessages = 0;
private int memberId;
private int highMemberId;
private int pendingMemberRemovals = 0;
private long logPublicationChannelTag;
private ReadableCounter appendPosition = null;
private final Counter commitPosition;
private ConsensusModule.State state = ConsensusModule.State.INIT;
private Cluster.Role role = Cluster.Role.FOLLOWER;
private ClusterMember[] activeMembers;
private ClusterMember[] passiveMembers = ClusterMember.EMPTY_MEMBERS;
private ClusterMember leaderMember;
private ClusterMember thisMember;
private long[] rankedPositions;
private final long[] serviceClientIds;
private final ArrayDeque<ServiceAck>[] serviceAckQueues;
private final Counter clusterRoleCounter;
private final ClusterMarkFile markFile;
private final AgentInvoker aeronClientInvoker;
private final ClusterClock clusterClock;
private final TimeUnit clusterTimeUnit;
private final Counter moduleState;
private final Counter controlToggle;
private final TimerService timerService;
private final ConsensusModuleAdapter consensusModuleAdapter;
private final ServiceProxy serviceProxy;
private final IngressAdapter ingressAdapter;
private final EgressPublisher egressPublisher;
private final LogPublisher logPublisher;
private final LogAdapter logAdapter;
private final ConsensusAdapter consensusAdapter;
private final ConsensusPublisher consensusPublisher = new ConsensusPublisher();
private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
private final ArrayList<ClusterSession> redirectSessions = new ArrayList<>();
private final Int2ObjectHashMap<ClusterMember> clusterMemberByIdMap = new Int2ObjectHashMap<>();
private final Long2LongCounterMap expiredTimerCountByCorrelationIdMap = new Long2LongCounterMap(0);
private final ArrayDeque<ClusterSession> uncommittedClosedSessions = new ArrayDeque<>();
private final LongArrayQueue uncommittedTimers = new LongArrayQueue(Long.MAX_VALUE);
private final ExpandableRingBuffer pendingServiceMessages = new ExpandableRingBuffer();
private final ExpandableRingBuffer.MessageConsumer serviceSessionMessageAppender =
this::serviceSessionMessageAppender;
private final ExpandableRingBuffer.MessageConsumer leaderServiceSessionMessageSweeper =
this::leaderServiceSessionMessageSweeper;
private final ExpandableRingBuffer.MessageConsumer followerServiceSessionMessageSweeper =
this::followerServiceSessionMessageSweeper;
private final Authenticator authenticator;
private final ClusterSessionProxy sessionProxy;
private final Aeron aeron;
private final ConsensusModule.Context ctx;
private final IdleStrategy idleStrategy;
private final RecordingLog recordingLog;
private final ArrayList<RecordingLog.Snapshot> dynamicJoinSnapshots = new ArrayList<>();
private RecordingLog.RecoveryPlan recoveryPlan;
private AeronArchive archive;
private RecordingSignalPoller recordingSignalPoller;
private Election election;
private DynamicJoin dynamicJoin;
private ClusterTermination clusterTermination;
private long logSubscriptionId = NULL_VALUE;
private long logRecordingId = NULL_VALUE;
private long logRecordedPosition = NULL_POSITION;
private String liveLogDestination;
private String catchupLogDestination;
private String ingressEndpoints;
ConsensusModuleAgent(final ConsensusModule.Context ctx)
{
this.ctx = ctx;
this.aeron = ctx.aeron();
this.clusterClock = ctx.clusterClock();
this.clusterTimeUnit = clusterClock.timeUnit();
this.sessionTimeoutNs = ctx.sessionTimeoutNs();
this.leaderHeartbeatIntervalNs = ctx.leaderHeartbeatIntervalNs();
this.leaderHeartbeatTimeoutNs = ctx.leaderHeartbeatTimeoutNs();
this.egressPublisher = ctx.egressPublisher();
this.moduleState = ctx.moduleStateCounter();
this.commitPosition = ctx.commitPositionCounter();
this.controlToggle = ctx.controlToggleCounter();
this.logPublisher = ctx.logPublisher();
this.idleStrategy = ctx.idleStrategy();
this.timerService = new TimerService(
this,
clusterTimeUnit,
0,
findNextPositivePowerOfTwo(clusterTimeUnit.convert(ctx.wheelTickResolutionNs(), TimeUnit.NANOSECONDS)),
ctx.ticksPerWheel());
this.activeMembers = ClusterMember.parse(ctx.clusterMembers());
this.sessionProxy = new ClusterSessionProxy(egressPublisher);
this.memberId = ctx.clusterMemberId();
this.clusterRoleCounter = ctx.clusterNodeRoleCounter();
this.markFile = ctx.clusterMarkFile();
this.recordingLog = ctx.recordingLog();
this.serviceClientIds = new long[ctx.serviceCount()];
Arrays.fill(serviceClientIds, NULL_VALUE);
this.serviceAckQueues = ServiceAck.newArray(ctx.serviceCount());
this.highMemberId = ClusterMember.highMemberId(activeMembers);
aeronClientInvoker = aeron.conductorAgentInvoker();
aeronClientInvoker.invoke();
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
role(Cluster.Role.FOLLOWER);
ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap);
thisMember = ClusterMember.determineMember(activeMembers, ctx.clusterMemberId(), ctx.memberEndpoints());
leaderMember = thisMember;
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
if (!consensusUri.containsKey(ENDPOINT_PARAM_NAME))
{
consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint());
}
consensusAdapter = new ConsensusAdapter(
aeron.addSubscription(consensusUri.toString(), ctx.consensusStreamId()), this);
ClusterMember.addConsensusPublications(activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron);
ingressAdapter = new IngressAdapter(ctx.ingressFragmentLimit(), this);
logAdapter = new LogAdapter(this, ctx.logFragmentLimit());
consensusModuleAdapter = new ConsensusModuleAdapter(
aeron.addSubscription(ctx.controlChannel(), ctx.consensusModuleStreamId()), this);
serviceProxy = new ServiceProxy(aeron.addPublication(ctx.controlChannel(), ctx.serviceStreamId()));
authenticator = ctx.authenticatorSupplier().get();
}
/**
* {@inheritDoc}
*/
public void onClose()
{
if (!aeron.isClosed())
{
aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId);
tryStopLogRecording();
if (!ctx.ownsAeronClient())
{
logPublisher.disconnect(ctx.countedErrorHandler());
logAdapter.disconnect(ctx.countedErrorHandler());
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
for (final ClusterSession session : sessionByIdMap.values())
{
session.close(errorHandler);
}
CloseHelper.close(errorHandler, ingressAdapter);
ClusterMember.closeConsensusPublications(errorHandler, activeMembers);
CloseHelper.close(errorHandler, consensusAdapter);
CloseHelper.close(errorHandler, serviceProxy);
CloseHelper.close(errorHandler, consensusModuleAdapter);
CloseHelper.close(errorHandler, archive);
}
state(ConsensusModule.State.CLOSED);
}
markFile.updateActivityTimestamp(NULL_VALUE);
ctx.close();
}
/**
* {@inheritDoc}
*/
public void onStart()
{
archive = AeronArchive.connect(ctx.archiveContext().clone());
recordingSignalPoller = new RecordingSignalPoller(
archive.controlSessionId(), archive.controlResponsePoller().subscription());
if (null == (dynamicJoin = requiresDynamicJoin()))
{
final long lastTermRecordingId = recordingLog.findLastTermRecordingId();
if (NULL_VALUE != lastTermRecordingId)
{
archive.tryStopRecordingByIdentity(lastTermRecordingId);
}
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
if (null != recoveryPlan.log)
{
logRecordingId = recoveryPlan.log.recordingId;
}
try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
{
if (!recoveryPlan.snapshots.isEmpty())
{
loadSnapshot(recoveryPlan.snapshots.get(0), archive);
}
while (!ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues))
{
idle(consensusModuleAdapter.poll());
}
captureServiceClientIds();
++serviceAckId;
}
election = new Election(
true,
recoveryPlan.lastLeadershipTermId,
commitPosition.getWeak(),
recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
state(ConsensusModule.State.ACTIVE);
}
unavailableCounterHandlerRegistrationId = aeron.addUnavailableCounterHandler(this::onUnavailableCounter);
}
/**
* {@inheritDoc}
*/
public int doWork()
{
int workCount = 0;
final long now = clusterClock.time();
final long nowNs = clusterTimeUnit.toNanos(now);
if (nowNs >= slowTickDeadlineNs)
{
slowTickDeadlineNs = nowNs + SLOW_TICK_INTERVAL_NS;
workCount += slowTickWork(clusterTimeUnit.toMillis(now), nowNs);
}
workCount += consensusAdapter.poll();
if (null != dynamicJoin)
{
workCount += dynamicJoin.doWork(nowNs);
}
else if (null != election)
{
workCount += election.doWork(nowNs);
}
else
{
workCount += consensusWork(now, nowNs);
}
return workCount;
}
/**
* {@inheritDoc}
*/
public String roleName()
{
return "consensus-module_" + ctx.clusterId() + "_" + memberId;
}
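    // Ingress session connect request. Followers queue the session to be redirected to the
    // current leader; the leader rejects incompatible protocol versions or sessions beyond the
    // configured limit, and otherwise begins authentication of the pending session.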
void onSessionConnect(
final long correlationId,
final int responseStreamId,
final int version,
final String responseChannel,
final byte[] encodedCredentials)
{
final long clusterSessionId = Cluster.Role.LEADER == role ? nextSessionId++ : NULL_VALUE;
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.connect(aeron);
final long now = clusterClock.time();
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
if (Cluster.Role.LEADER != role)
{
redirectSessions.add(session);
}
else
{
if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version))
{
final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) +
", cluster is " + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION);
session.reject(EventCode.ERROR, detail);
rejectedSessions.add(session);
}
else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions())
{
session.reject(EventCode.ERROR, SESSION_LIMIT_MSG);
rejectedSessions.add(session);
}
else
{
authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now));
pendingSessions.add(session);
}
}
}
void onSessionClose(final long leadershipTermId, final long clusterSessionId)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
session.closing(CloseReason.CLIENT_ACTION);
session.disconnect(ctx.countedErrorHandler());
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
sessionByIdMap.remove(clusterSessionId);
session.close(ctx.countedErrorHandler());
}
}
}
}
ControlledFragmentAssembler.Action onIngressMessage(
final long leadershipTermId,
final long clusterSessionId,
final DirectBuffer buffer,
final int offset,
final int length)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
final long now = clusterClock.time();
if (logPublisher.appendMessage(leadershipTermId, clusterSessionId, now, buffer, offset, length) > 0)
{
session.timeOfLastActivityNs(clusterTimeUnit.toNanos(now));
return ControlledFragmentHandler.Action.CONTINUE;
}
else
{
return ControlledFragmentHandler.Action.ABORT;
}
}
}
return ControlledFragmentHandler.Action.CONTINUE;
}
void onSessionKeepAlive(final long leadershipTermId, final long clusterSessionId)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
session.timeOfLastActivityNs(clusterTimeUnit.toNanos(clusterClock.time()));
}
}
}
void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] encodedCredentials)
{
if (Cluster.Role.LEADER == role)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long now = clusterClock.time();
final long nowMs = clusterTimeUnit.toMillis(now);
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs);
break;
}
}
}
}
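    // Appends an expired timer event to the log and records it as uncommitted; returns false
    // when the append is back-pressured so the expiry can be retried.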
boolean onTimerEvent(final long correlationId)
{
final long appendPosition = logPublisher.appendTimer(correlationId, leadershipTermId, clusterClock.time());
if (appendPosition > 0)
{
uncommittedTimers.offerLong(appendPosition);
uncommittedTimers.offerLong(correlationId);
return true;
}
return false;
}
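    // Consensus protocol events: while an election is in progress they are delegated to it,
    // otherwise they drive the leader/follower state machine directly.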
void onCanvassPosition(
final long logLeadershipTermId,
final long logPosition,
final long leadershipTermId,
final int followerMemberId)
{
if (null != election)
{
election.onCanvassPosition(logLeadershipTermId, logPosition, leadershipTermId, followerMemberId);
}
else if (Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower && logLeadershipTermId <= this.leadershipTermId)
{
final RecordingLog.Entry currentTermEntry = recordingLog.getTermEntry(this.leadershipTermId);
final long termBaseLogPosition = currentTermEntry.termBaseLogPosition;
final long timestamp = ctx.clusterClock().timeNanos();
final long nextLogLeadershipTermId;
final long nextTermBaseLogPosition;
final long nextLogPosition;
if (logLeadershipTermId < this.leadershipTermId)
{
final RecordingLog.Entry nextLogEntry = recordingLog.findTermEntry(logLeadershipTermId + 1);
nextLogLeadershipTermId = null != nextLogEntry ?
nextLogEntry.leadershipTermId : this.leadershipTermId;
nextTermBaseLogPosition = null != nextLogEntry ?
nextLogEntry.termBaseLogPosition : termBaseLogPosition;
nextLogPosition = null != nextLogEntry ? nextLogEntry.logPosition : NULL_POSITION;
}
else
{
nextLogLeadershipTermId = NULL_VALUE;
nextTermBaseLogPosition = NULL_POSITION;
nextLogPosition = NULL_POSITION;
}
final long appendPosition = logPublisher.position();
consensusPublisher.newLeadershipTerm(
follower.publication(),
logLeadershipTermId,
nextLogLeadershipTermId,
nextTermBaseLogPosition,
nextLogPosition,
this.leadershipTermId,
termBaseLogPosition,
appendPosition,
logRecordingId,
timestamp,
memberId,
logPublisher.sessionId(),
false);
}
}
}
void onRequestVote(
final long logLeadershipTermId, final long logPosition, final long candidateTermId, final int candidateId)
{
if (null != election)
{
election.onRequestVote(logLeadershipTermId, logPosition, candidateTermId, candidateId);
}
else if (candidateTermId > leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected vote request", WARN));
enterElection();
}
}
void onVote(
final long candidateTermId,
final long logLeadershipTermId,
final long logPosition,
final int candidateMemberId,
final int followerMemberId,
final boolean vote)
{
if (null != election)
{
election.onVote(
candidateTermId, logLeadershipTermId, logPosition, candidateMemberId, followerMemberId, vote);
}
}
void onNewLeadershipTerm(
final long logLeadershipTermId,
final long nextLeadershipTermId,
final long nextTermBaseLogPosition,
final long nextLogPosition,
final long leadershipTermId,
final long termBaseLogPosition,
final long logPosition,
final long leaderRecordingId,
final long timestamp,
final int leaderId,
final int logSessionId,
final boolean isStartup)
{
if (null != election)
{
election.onNewLeadershipTerm(
logLeadershipTermId,
nextLeadershipTermId,
nextTermBaseLogPosition,
nextLogPosition,
leadershipTermId,
termBaseLogPosition,
logPosition,
leaderRecordingId,
timestamp,
leaderId,
logSessionId,
isStartup);
}
else if (Cluster.Role.FOLLOWER == role &&
leadershipTermId == this.leadershipTermId &&
leaderId == leaderMember.id())
{
notifiedCommitPosition = Math.max(notifiedCommitPosition, logPosition);
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected new leadership term", WARN));
enterElection();
}
}
void onAppendPosition(final long leadershipTermId, final long logPosition, final int followerMemberId)
{
if (null != election)
{
election.onAppendPosition(leadershipTermId, logPosition, followerMemberId);
}
else if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower)
{
follower
.logPosition(logPosition)
.timeOfLastAppendPositionNs(clusterClock.timeNanos());
trackCatchupCompletion(follower, leadershipTermId);
}
}
}
void onCommitPosition(final long leadershipTermId, final long logPosition, final int leaderMemberId)
{
if (null != election)
{
election.onCommitPosition(leadershipTermId, logPosition, leaderMemberId);
}
else if (leadershipTermId == this.leadershipTermId &&
leaderMemberId == leaderMember.id() &&
Cluster.Role.FOLLOWER == role)
{
notifiedCommitPosition = logPosition;
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected commit position", WARN));
enterElection();
}
}
void onCatchupPosition(
final long leadershipTermId, final long logPosition, final int followerMemberId, final String catchupEndpoint)
{
if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower && follower.catchupReplaySessionId() == NULL_VALUE)
{
final String channel = new ChannelUriStringBuilder()
.media(CommonContext.UDP_MEDIA)
.endpoint(catchupEndpoint)
.sessionId(logPublisher.sessionId())
.linger(0L)
.eos(Boolean.FALSE)
.build();
follower.catchupReplaySessionId(archive.startReplay(
logRecordingId, logPosition, Long.MAX_VALUE, channel, ctx.logStreamId()));
follower.catchupReplayCorrelationId(archive.lastCorrelationId());
}
}
}
void onStopCatchup(final long leadershipTermId, final int followerMemberId)
{
if (leadershipTermId == this.replayLeadershipTermId && followerMemberId == memberId)
{
if (null != catchupLogDestination)
{
logAdapter.removeDestination(catchupLogDestination);
catchupLogDestination = null;
}
}
}
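    // A passive member asking to join: the leader assigns it a new member id, adds a consensus
    // publication for it and adds its log endpoint as a destination; followers forward the
    // request to the leader.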
void onAddPassiveMember(final long correlationId, final String memberEndpoints)
{
if (null == election && null == dynamicJoin)
{
if (Cluster.Role.LEADER == role)
{
if (ClusterMember.notDuplicateEndpoint(passiveMembers, memberEndpoints))
{
final ClusterMember newMember = ClusterMember.parseEndpoints(++highMemberId, memberEndpoints);
newMember.correlationId(correlationId);
passiveMembers = ClusterMember.addMember(passiveMembers, newMember);
clusterMemberByIdMap.put(newMember.id(), newMember);
ClusterMember.addConsensusPublication(
newMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
logPublisher.addDestination(ctx.isLogMdc(), newMember.logEndpoint());
}
}
else if (Cluster.Role.FOLLOWER == role)
{
consensusPublisher.addPassiveMember(leaderMember.publication(), correlationId, memberEndpoints);
}
}
}
void onClusterMembersChange(
final long correlationId, final int leaderMemberId, final String activeMembers, final String passiveMembers)
{
if (null != dynamicJoin)
{
dynamicJoin.onClusterMembersChange(correlationId, leaderMemberId, activeMembers, passiveMembers);
}
}
void onSnapshotRecordingQuery(final long correlationId, final int requestMemberId)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember requester = clusterMemberByIdMap.get(requestMemberId);
if (null != requester)
{
consensusPublisher.snapshotRecording(
requester.publication(),
correlationId,
recoveryPlan,
ClusterMember.encodeAsString(activeMembers));
}
}
}
void onSnapshotRecordings(final long correlationId, final SnapshotRecordingsDecoder decoder)
{
if (null != dynamicJoin)
{
dynamicJoin.onSnapshotRecordings(correlationId, decoder);
}
}
void onJoinCluster(final long leadershipTermId, final int memberId)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
final long snapshotLeadershipTermId = recoveryPlan.snapshots.isEmpty() ?
NULL_VALUE : recoveryPlan.snapshots.get(0).leadershipTermId;
if (null != member && !member.hasRequestedJoin() && leadershipTermId <= snapshotLeadershipTermId)
{
if (null == member.publication())
{
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
final int streamId = ctx.consensusStreamId();
ClusterMember.addConsensusPublication(member, consensusUri, streamId, aeron);
logPublisher.addDestination(ctx.isLogMdc(), member.logEndpoint());
}
member.hasRequestedJoin(true);
}
}
}
void onTerminationPosition(final long leadershipTermId, final long logPosition)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.FOLLOWER == role)
{
terminationPosition = logPosition;
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
}
void onTerminationAck(final long leadershipTermId, final long logPosition, final int memberId)
{
if (leadershipTermId == this.leadershipTermId &&
logPosition >= terminationPosition &&
Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
if (null != member)
{
member.hasTerminated(true);
if (clusterTermination.canTerminate(activeMembers, terminationPosition, clusterClock.timeNanos()))
{
recordingLog.commitLogPosition(leadershipTermId, terminationPosition);
closeAndTerminate();
}
}
}
}
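    // Query from a cluster backup node. Non-leaders forward it to the leader, which handles it
    // like a client session connect with the same version, session-limit, and authentication
    // checks.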
void onBackupQuery(
final long correlationId,
final int responseStreamId,
final int version,
final String responseChannel,
final byte[] encodedCredentials)
{
if (null == election && null == dynamicJoin)
{
if (Cluster.Role.LEADER != role)
{
consensusPublisher.backupQuery(
leaderMember.publication(),
correlationId,
responseStreamId,
version,
responseChannel,
encodedCredentials);
}
else if (state == ConsensusModule.State.ACTIVE || state == ConsensusModule.State.SUSPENDED)
{
final ClusterSession session = new ClusterSession(NULL_VALUE, responseStreamId, responseChannel);
session.markAsBackupSession();
session.connect(aeron);
final long now = clusterClock.time();
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version))
{
final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) +
", cluster=" + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION);
session.reject(EventCode.ERROR, detail);
rejectedSessions.add(session);
}
else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions())
{
session.reject(EventCode.ERROR, SESSION_LIMIT_MSG);
rejectedSessions.add(session);
}
else
{
authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now));
pendingSessions.add(session);
}
}
}
}
void onRemoveMember(final int memberId, final boolean isPassive)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
if (null != member)
{
if (isPassive)
{
passiveMembers = ClusterMember.removeMember(passiveMembers, memberId);
member.closePublication(ctx.countedErrorHandler());
logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint());
clusterMemberByIdMap.remove(memberId);
clusterMemberByIdMap.compact();
}
else
{
final long now = clusterClock.time();
final long position = logPublisher.appendMembershipChangeEvent(
leadershipTermId,
now,
this.memberId,
activeMembers.length,
ChangeType.QUIT,
memberId,
ClusterMember.encodeAsString(ClusterMember.removeMember(activeMembers, memberId)));
if (position > 0)
{
timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs;
member.removalPosition(position);
pendingMemberRemovals++;
}
}
}
}
}
void onClusterMembersQuery(final long correlationId, final boolean isExtendedRequest)
{
if (isExtendedRequest)
{
serviceProxy.clusterMembersExtendedResponse(
correlationId, clusterClock.timeNanos(), leaderMember.id(), memberId, activeMembers, passiveMembers);
}
else
{
serviceProxy.clusterMembersResponse(
correlationId,
leaderMember.id(),
ClusterMember.encodeAsString(activeMembers),
ClusterMember.encodeAsString(passiveMembers));
}
}
void state(final ConsensusModule.State newState)
{
if (newState != state)
{
stateChange(state, newState, memberId);
state = newState;
if (!moduleState.isClosed())
{
moduleState.set(newState.code());
}
}
}
ConsensusModule.State state()
{
return state;
}
void stateChange(final ConsensusModule.State oldState, final ConsensusModule.State newState, final int memberId)
{
//System.out.println("CM State memberId=" + memberId + " " + oldState + " -> " + newState);
}
void role(final Cluster.Role newRole)
{
if (newRole != role)
{
roleChange(role, newRole, memberId);
role = newRole;
if (!clusterRoleCounter.isClosed())
{
clusterRoleCounter.set(newRole.code());
}
}
}
void roleChange(final Cluster.Role oldRole, final Cluster.Role newRole, final int memberId)
{
//System.out.println("CM Role memberId=" + memberId + " " + oldRole + " -> " + newRole);
}
Cluster.Role role()
{
return role;
}
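    // Step down to follower ahead of a new leadership term: stop ingress, detach log destinations and, if a log
    // recording exists, stop it and reset session and uncommitted-entry state to the given log position.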
long prepareForNewLeadership(final long logPosition)
{
role(Cluster.Role.FOLLOWER);
CloseHelper.close(ctx.countedErrorHandler(), ingressAdapter);
ClusterControl.ToggleState.deactivate(controlToggle);
if (null != catchupLogDestination)
{
logAdapter.removeDestination(catchupLogDestination);
catchupLogDestination = null;
}
if (null != liveLogDestination)
{
logAdapter.removeDestination(liveLogDestination);
liveLogDestination = null;
}
logAdapter.disconnect(ctx.countedErrorHandler());
logPublisher.disconnect(ctx.countedErrorHandler());
if (RecordingPos.NULL_RECORDING_ID != logRecordingId)
{
tryStopLogRecording();
lastAppendPosition = getLastAppendedPosition();
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
clearSessionsAfter(logPosition);
for (final ClusterSession session : sessionByIdMap.values())
{
session.disconnect(ctx.countedErrorHandler());
}
commitPosition.setOrdered(logPosition);
restoreUncommittedEntries(logPosition);
}
return lastAppendPosition;
}
void onServiceCloseSession(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.closing(CloseReason.SERVICE_ACTION);
if (Cluster.Role.LEADER == role &&
logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = CloseReason.SERVICE_ACTION.name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
sessionByIdMap.remove(clusterSessionId);
session.close(ctx.countedErrorHandler());
}
}
}
void onServiceMessage(final long leadershipTermId, final DirectBuffer buffer, final int offset, final int length)
{
if (leadershipTermId == this.leadershipTermId)
{
enqueueServiceSessionMessage((MutableDirectBuffer)buffer, offset, length, nextServiceSessionId++);
}
}
void onScheduleTimer(final long correlationId, final long deadline)
{
if (expiredTimerCountByCorrelationIdMap.get(correlationId) == 0)
{
timerService.scheduleTimerForCorrelationId(correlationId, deadline);
}
else
{
expiredTimerCountByCorrelationIdMap.decrementAndGet(correlationId);
}
}
void onCancelTimer(final long correlationId)
{
timerService.cancelTimerByCorrelationId(correlationId);
}
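    // Collect acknowledgements from the services; once every service has acked the position, complete the
    // pending snapshot, shutdown or termination step that was waiting on them.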
void onServiceAck(
final long logPosition, final long timestamp, final long ackId, final long relevantId, final int serviceId)
{
captureServiceAck(logPosition, ackId, relevantId, serviceId);
if (ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues))
{
if (ConsensusModule.State.SNAPSHOT == state)
{
final ServiceAck[] serviceAcks = pollServiceAcks(logPosition, serviceId);
++serviceAckId;
takeSnapshot(timestamp, logPosition, serviceAcks);
if (null != clusterTermination)
{
serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler());
clusterTermination.deadlineNs(clusterClock.timeNanos() + ctx.terminationTimeoutNs());
state(ConsensusModule.State.TERMINATING);
}
else
{
state(ConsensusModule.State.ACTIVE);
if (Cluster.Role.LEADER == role)
{
ClusterControl.ToggleState.reset(controlToggle);
}
}
}
else if (ConsensusModule.State.QUITTING == state)
{
closeAndTerminate();
}
else if (ConsensusModule.State.TERMINATING == state)
{
if (null == clusterTermination)
{
consensusPublisher.terminationAck(
leaderMember.publication(), leadershipTermId, logPosition, memberId);
recordingLog.commitLogPosition(leadershipTermId, logPosition);
closeAndTerminate();
}
else
{
clusterTermination.onServicesTerminated();
if (clusterTermination.canTerminate(
activeMembers, terminationPosition, clusterClock.timeNanos()))
{
recordingLog.commitLogPosition(leadershipTermId, logPosition);
closeAndTerminate();
}
}
}
}
}
void onReplaySessionMessage(final long clusterSessionId, final long timestamp)
{
final ClusterSession clusterSession = sessionByIdMap.get(clusterSessionId);
if (null == clusterSession)
{
logServiceSessionId = clusterSessionId;
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
}
else
{
clusterSession.timeOfLastActivityNs(clusterTimeUnit.toNanos(timestamp));
}
}
void onReplayTimerEvent(final long correlationId)
{
if (!timerService.cancelTimerByCorrelationId(correlationId))
{
expiredTimerCountByCorrelationIdMap.getAndIncrement(correlationId);
}
}
void onReplaySessionOpen(
final long logPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(logPosition);
session.lastActivityNs(clusterTimeUnit.toNanos(timestamp), correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onReplaySessionClose(final long clusterSessionId, final CloseReason closeReason)
{
final ClusterSession clusterSession = sessionByIdMap.remove(clusterSessionId);
if (null != clusterSession)
{
clusterSession.closing(closeReason);
clusterSession.close(ctx.countedErrorHandler());
}
}
void onReplayClusterAction(final long leadershipTermId, final ClusterAction action)
{
if (leadershipTermId == this.replayLeadershipTermId)
{
if (ClusterAction.SUSPEND == action)
{
state(ConsensusModule.State.SUSPENDED);
}
else if (ClusterAction.RESUME == action)
{
state(ConsensusModule.State.ACTIVE);
}
else if (ClusterAction.SNAPSHOT == action)
{
state(ConsensusModule.State.SNAPSHOT);
}
}
}
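    // When replaying a new leadership term event, the recorded time unit and application major version must
    // match this module's configuration or the node terminates.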
void onReplayNewLeadershipTermEvent(
final long leadershipTermId,
final long logPosition,
final long timestamp,
final long termBaseLogPosition,
final TimeUnit timeUnit,
final int appVersion)
{
if (timeUnit != clusterTimeUnit)
{
ctx.countedErrorHandler().onError(new ClusterException(
"incompatible timestamp units: " + clusterTimeUnit + " log=" + timeUnit,
AeronException.Category.FATAL));
unexpectedTermination();
}
if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion))
{
ctx.countedErrorHandler().onError(new ClusterException(
"incompatible version: " + SemanticVersion.toString(ctx.appVersion()) +
" log=" + SemanticVersion.toString(appVersion),
AeronException.Category.FATAL));
unexpectedTermination();
}
leadershipTermId(leadershipTermId);
if (null != election)
{
election.onReplayNewLeadershipTermEvent(
logRecordingId, leadershipTermId, logPosition, timestamp, termBaseLogPosition);
}
}
void onReplayMembershipChange(
final long leadershipTermId,
final long logPosition,
final int leaderMemberId,
final ChangeType changeType,
final int memberId,
final String clusterMembers)
{
if (leadershipTermId == this.replayLeadershipTermId)
{
if (ChangeType.JOIN == changeType)
{
final ClusterMember[] newMembers = ClusterMember.parse(clusterMembers);
if (memberId == this.memberId)
{
activeMembers = newMembers;
clusterMemberByIdMap.clear();
clusterMemberByIdMap.compact();
ClusterMember.addClusterMemberIds(newMembers, clusterMemberByIdMap);
thisMember = ClusterMember.findMember(activeMembers, memberId);
leaderMember = ClusterMember.findMember(activeMembers, leaderMemberId);
ClusterMember.addConsensusPublications(
newMembers,
thisMember,
ChannelUri.parse(ctx.consensusChannel()),
ctx.consensusStreamId(),
aeron);
}
else
{
clusterMemberJoined(memberId, newMembers);
}
}
else if (ChangeType.QUIT == changeType)
{
if (memberId == this.memberId)
{
state(ConsensusModule.State.QUITTING);
}
else
{
clusterMemberQuit(memberId);
if (leaderMemberId == memberId && null == election)
{
commitPosition.proposeMaxOrdered(logPosition);
enterElection();
}
}
}
if (null != election)
{
election.onMembershipChange(activeMembers, changeType, memberId, logPosition);
}
}
}
void onLoadSession(
final long clusterSessionId,
final long correlationId,
final long openedPosition,
final long timeOfLastActivity,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
sessionByIdMap.put(clusterSessionId, new ClusterSession(
clusterSessionId,
correlationId,
openedPosition,
timeOfLastActivity,
responseStreamId,
responseChannel,
closeReason));
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadPendingMessage(final DirectBuffer buffer, final int offset, final int length)
{
pendingServiceMessages.append(buffer, offset, length);
}
void onLoadConsensusModuleState(
final long nextSessionId,
final long nextServiceSessionId,
final long logServiceSessionId,
final int pendingMessageCapacity)
{
this.nextSessionId = nextSessionId;
this.nextServiceSessionId = nextServiceSessionId;
this.logServiceSessionId = logServiceSessionId;
pendingServiceMessages.reset(pendingMessageCapacity);
}
void onLoadClusterMembers(final int memberId, final int highMemberId, final String members)
{
if (null == dynamicJoin && !ctx.clusterMembersIgnoreSnapshot())
{
if (NULL_VALUE == this.memberId)
{
this.memberId = memberId;
ctx.clusterMarkFile().memberId(memberId);
}
if (ClusterMember.EMPTY_MEMBERS == activeMembers)
{
activeMembers = ClusterMember.parse(members);
this.highMemberId = Math.max(ClusterMember.highMemberId(activeMembers), highMemberId);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
thisMember = clusterMemberByIdMap.get(memberId);
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint());
ClusterMember.addConsensusPublications(
activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron);
}
}
}
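    // Build the tagged exclusive publication the leader appends the log to, deriving flow control, MDC and
    // initial position settings from the context and recovery plan.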
int addLogPublication()
{
final long logPublicationTag = aeron.nextCorrelationId();
logPublicationChannelTag = aeron.nextCorrelationId();
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
channelUri.put(ALIAS_PARAM_NAME, "log");
channelUri.put(TAGS_PARAM_NAME, logPublicationChannelTag + "," + logPublicationTag);
if (channelUri.isUdp())
{
if (!channelUri.containsKey(FLOW_CONTROL_PARAM_NAME))
{
final long timeout = TimeUnit.NANOSECONDS.toSeconds(ctx.leaderHeartbeatTimeoutNs());
channelUri.put(FLOW_CONTROL_PARAM_NAME, "min,t:" + timeout + "s");
}
if (ctx.isLogMdc())
{
channelUri.put(MDC_CONTROL_MODE_PARAM_NAME, MDC_CONTROL_MODE_MANUAL);
}
channelUri.put(SPIES_SIMULATE_CONNECTION_PARAM_NAME, Boolean.toString(activeMembers.length == 1));
}
if (null != recoveryPlan.log)
{
channelUri.initialPosition(
recoveryPlan.appendedLogPosition, recoveryPlan.log.initialTermId, recoveryPlan.log.termBufferLength);
channelUri.put(MTU_LENGTH_PARAM_NAME, Integer.toString(recoveryPlan.log.mtuLength));
}
final String channel = channelUri.toString();
final ExclusivePublication publication = aeron.addExclusivePublication(channel, ctx.logStreamId());
if (ctx.isLogMdc())
{
for (final ClusterMember member : activeMembers)
{
if (member.id() != memberId)
{
publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint());
}
}
for (final ClusterMember member : passiveMembers)
{
publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint());
}
}
logPublisher.publication(publication);
return publication.sessionId();
}
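    // As leader, start recording the log publication locally and wait for the services to join it.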
void joinLogAsLeader(
final long leadershipTermId, final long logPosition, final int logSessionId, final boolean isStartup)
{
final boolean isIpc = ctx.logChannel().startsWith(IPC_CHANNEL);
final String channel = (isIpc ? "aeron:ipc" : "aeron:udp") +
"?tags=" + logPublicationChannelTag + "|session-id=" + logSessionId + "|alias=log";
leadershipTermId(leadershipTermId);
startLogRecording(channel, ctx.logStreamId(), SourceLocation.LOCAL);
createAppendPosition(logSessionId);
awaitServicesReady(
isIpc ? channel : SPY_PREFIX + channel,
ctx.logStreamId(),
logSessionId,
logPosition,
Long.MAX_VALUE,
isStartup,
Cluster.Role.LEADER);
}
void liveLogDestination(final String liveLogDestination)
{
this.liveLogDestination = liveLogDestination;
}
String liveLogDestination()
{
return liveLogDestination;
}
void catchupLogDestination(final String catchupLogDestination)
{
this.catchupLogDestination = catchupLogDestination;
}
String catchupLogDestination()
{
return catchupLogDestination;
}
void joinLogAsFollower(final Image image, final boolean isLeaderStartup)
{
final Subscription logSubscription = image.subscription();
final int streamId = logSubscription.streamId();
final String channel = logSubscription.channel();
startLogRecording(channel, streamId, SourceLocation.REMOTE);
createAppendPosition(image.sessionId());
appendDynamicJoinTermAndSnapshots();
logAdapter.image(image);
lastAppendPosition = image.joinPosition();
awaitServicesReady(
channel,
streamId,
image.sessionId(),
image.joinPosition(),
Long.MAX_VALUE,
isLeaderStartup,
Cluster.Role.FOLLOWER);
}
void awaitServicesReady(
final String logChannel,
final int streamId,
final int logSessionId,
final long logPosition,
final long maxLogPosition,
final boolean isStartup,
final Cluster.Role role)
{
serviceProxy.joinLog(
logPosition,
maxLogPosition,
memberId,
logSessionId,
streamId,
isStartup,
role,
logChannel);
expectedAckPosition = logPosition;
while (!ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues))
{
idle(consensusModuleAdapter.poll());
}
ServiceAck.removeHead(serviceAckQueues);
++serviceAckId;
}
void leadershipTermId(final long leadershipTermId)
{
this.leadershipTermId = leadershipTermId;
this.replayLeadershipTermId = leadershipTermId;
}
LogReplay newLogReplay(final long logPosition, final long appendPosition)
{
return new LogReplay(
archive,
logRecordingId,
logPosition,
appendPosition,
logAdapter,
ctx);
}
int replayLogPoll(final LogAdapter logAdapter, final long stopPosition)
{
int workCount = 0;
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
final int fragments = logAdapter.poll(stopPosition);
final long position = logAdapter.position();
if (fragments > 0)
{
commitPosition.setOrdered(position);
}
else if (logAdapter.isImageClosed() && position < stopPosition)
{
throw new ClusterException("unexpected image close when replaying log: position=" + position);
}
workCount += fragments;
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
long logRecordingId()
{
return logRecordingId;
}
void logRecordingId(final long recordingId)
{
if (NULL_VALUE != recordingId)
{
logRecordingId = recordingId;
}
}
void truncateLogEntry(final long leadershipTermId, final long logPosition)
{
archive.stopAllReplays(logRecordingId);
archive.truncateRecording(logRecordingId, logPosition);
recordingLog.commitLogPosition(leadershipTermId, logPosition);
logAdapter.disconnect(ctx.countedErrorHandler(), logPosition);
}
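    // Complete an election: the leader must first append the new leadership term event, then both roles update
    // the commit position, refresh member details and reconnect ingress.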
boolean electionComplete()
{
final long logPosition = election.logPosition();
final long now = clusterClock.time();
final long nowNs = clusterTimeUnit.toNanos(now);
if (Cluster.Role.LEADER == role)
{
if (!logPublisher.isConnected() || !logPublisher.appendNewLeadershipTermEvent(
leadershipTermId,
now,
logPosition,
memberId,
logPublisher.sessionId(),
clusterTimeUnit,
ctx.appVersion()))
{
return false;
}
timeOfLastLogUpdateNs = nowNs - leaderHeartbeatIntervalNs;
timerService.currentTickTime(now);
ClusterControl.ToggleState.activate(controlToggle);
prepareSessionsForNewTerm(election.isLeaderStartup());
}
else
{
timeOfLastLogUpdateNs = nowNs;
timeOfLastAppendPositionNs = nowNs;
}
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
notifiedCommitPosition = logPosition;
commitPosition.setOrdered(logPosition);
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
updateMemberDetails(election.leader());
election = null;
connectIngress();
return true;
}
boolean dynamicJoinComplete()
{
if (0 == activeMembers.length)
{
activeMembers = dynamicJoin.clusterMembers();
ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap);
leaderMember = dynamicJoin.leader();
ClusterMember.addConsensusPublications(
activeMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
}
if (NULL_VALUE == memberId)
{
memberId = dynamicJoin.memberId();
ctx.clusterMarkFile().memberId(memberId);
thisMember.id(memberId);
}
dynamicJoin = null;
election = new Election(
false,
leadershipTermId,
commitPosition.getWeak(),
recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
return true;
}
void trackCatchupCompletion(final ClusterMember follower, final long leadershipTermId)
{
if (NULL_VALUE != follower.catchupReplaySessionId())
{
if (follower.logPosition() >= logPublisher.position())
{
if (NULL_VALUE != follower.catchupReplayCorrelationId())
{
if (archive.archiveProxy().stopReplay(
follower.catchupReplaySessionId(), aeron.nextCorrelationId(), archive.controlSessionId()))
{
follower.catchupReplayCorrelationId(NULL_VALUE);
}
}
if (consensusPublisher.stopCatchup(follower.publication(), leadershipTermId, follower.id()))
{
follower.catchupReplaySessionId(NULL_VALUE);
}
}
}
}
void catchupInitiated(final long nowNs)
{
timeOfLastAppendPositionNs = nowNs;
}
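    // Poll the catch-up replay of the log, propose the commit position and report append progress to the leader,
    // failing if no progress is made within the leader heartbeat timeout.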
int catchupPoll(final long limitPosition, final long nowNs)
{
int workCount = 0;
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
final int fragments = logAdapter.poll(Math.min(appendPosition.get(), limitPosition));
workCount += fragments;
if (fragments == 0 && logAdapter.image().isClosed())
{
throw new ClusterException("unexpected close replaying log: position=" + logAdapter.image().position());
}
}
final long appendPosition = logAdapter.position();
if (appendPosition > lastAppendPosition || nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs))
{
commitPosition.proposeMaxOrdered(appendPosition);
final ExclusivePublication publication = election.leader().publication();
if (consensusPublisher.appendPosition(publication, replayLeadershipTermId, appendPosition, memberId))
{
lastAppendPosition = appendPosition;
timeOfLastAppendPositionNs = nowNs;
}
}
if (nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatTimeoutNs) && ConsensusModule.State.ACTIVE == state)
{
throw new ClusterException("no catchup progress", WARN);
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
boolean isCatchupNearLive(final long position)
{
final Image image = logAdapter.image();
if (null != image)
{
final long localPosition = image.position();
final long window = Math.min(image.termBufferLength() >> 2, LIVE_ADD_MAX_WINDOW);
return localPosition >= (position - window);
}
return false;
}
void stopAllCatchups()
{
for (final ClusterMember member : activeMembers)
{
if (member.catchupReplaySessionId() != NULL_VALUE)
{
if (member.catchupReplayCorrelationId() != NULL_VALUE)
{
try
{
archive.stopReplay(member.catchupReplaySessionId());
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException("catchup already stopped", ex, WARN));
}
}
member.catchupReplaySessionId(NULL_VALUE);
member.catchupReplayCorrelationId(NULL_VALUE);
}
}
}
void retrievedSnapshot(final long localRecordingId, final RecordingLog.Snapshot leaderSnapshot)
{
dynamicJoinSnapshots.add(new RecordingLog.Snapshot(
localRecordingId,
leaderSnapshot.leadershipTermId,
leaderSnapshot.termBaseLogPosition,
leaderSnapshot.logPosition,
leaderSnapshot.timestamp,
leaderSnapshot.serviceId));
}
Counter loadSnapshotsForDynamicJoin()
{
recoveryPlan = RecordingLog.createRecoveryPlan(dynamicJoinSnapshots);
if (null != recoveryPlan.log)
{
logRecordingId = recoveryPlan.log.recordingId;
}
final Counter recoveryStateCounter = addRecoveryStateCounter(recoveryPlan);
if (!recoveryPlan.snapshots.isEmpty())
{
loadSnapshot(recoveryPlan.snapshots.get(0), archive);
}
return recoveryStateCounter;
}
boolean pollForSnapshotLoadAck(final Counter recoveryStateCounter, final long nowNs)
{
consensusModuleAdapter.poll();
if (ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues))
{
captureServiceClientIds();
++serviceAckId;
CloseHelper.close(ctx.countedErrorHandler(), recoveryStateCounter);
state(ConsensusModule.State.ACTIVE);
timeOfLastLogUpdateNs = nowNs;
leadershipTermId(recoveryPlan.lastLeadershipTermId);
return true;
}
return false;
}
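    // Drain control responses and recording signals from the archive: handle catch-up replay errors, fatal
    // storage-space errors and stop signals for the log recording.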
int pollArchiveEvents()
{
int workCount = 0;
if (null != archive)
{
final RecordingSignalPoller poller = this.recordingSignalPoller;
workCount += poller.poll();
if (poller.isPollComplete())
{
final int templateId = poller.templateId();
if (ControlResponseDecoder.TEMPLATE_ID == templateId && poller.code() == ControlResponseCode.ERROR)
{
for (final ClusterMember member : activeMembers)
{
if (member.catchupReplayCorrelationId() != NULL_VALUE &&
member.catchupReplayCorrelationId() == poller.correlationId())
{
member.catchupReplaySessionId(NULL_VALUE);
member.catchupReplayCorrelationId(NULL_VALUE);
ctx.countedErrorHandler().onError(new ClusterException(
"catchup replay failed - " + poller.errorMessage(), WARN));
return workCount;
}
}
final ArchiveException ex = new ArchiveException(
poller.errorMessage(), (int)poller.relevantId(), poller.correlationId());
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
if (null != election)
{
election.handleError(clusterClock.timeNanos(), ex);
}
}
else if (RecordingSignalEventDecoder.TEMPLATE_ID == templateId)
{
final long recordingId = poller.recordingId();
final long position = poller.recordingPosition();
final RecordingSignal signal = poller.recordingSignal();
if (RecordingSignal.STOP == signal && recordingId == logRecordingId)
{
this.logRecordedPosition = position;
}
if (null != election)
{
election.onRecordingSignal(poller.correlationId(), recordingId, position, signal);
}
if (null != dynamicJoin)
{
dynamicJoin.onRecordingSignal(poller.correlationId(), recordingId, position, signal);
}
}
}
else if (0 == workCount && !poller.subscription().isConnected())
{
ctx.countedErrorHandler().onError(new ClusterException("local archive is not connected", WARN));
unexpectedTermination();
}
}
return workCount;
}
private void startLogRecording(final String channel, final int streamId, final SourceLocation sourceLocation)
{
try
{
final long logRecordingId = recordingLog.findLastTermRecordingId();
if (RecordingPos.NULL_RECORDING_ID == logRecordingId)
{
logSubscriptionId = archive.startRecording(channel, streamId, sourceLocation, true);
}
else
{
logSubscriptionId = archive.extendRecording(logRecordingId, channel, streamId, sourceLocation, true);
}
}
catch (final ArchiveException ex)
{
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
throw ex;
}
}
private void prepareSessionsForNewTerm(final boolean isStartup)
{
if (isStartup)
{
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.closing(CloseReason.TIMEOUT);
}
}
}
else
{
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.connect(aeron);
}
}
final long nowNs = clusterClock.timeNanos();
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.timeOfLastActivityNs(nowNs);
session.hasNewLeaderEventPending(true);
}
}
}
}
private void updateMemberDetails(final ClusterMember newLeader)
{
leaderMember = newLeader;
for (final ClusterMember clusterMember : activeMembers)
{
clusterMember.isLeader(clusterMember.id() == leaderMember.id());
}
ingressEndpoints = ClusterMember.ingressEndpoints(activeMembers);
}
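    // Low-frequency housekeeping: update the mark file, poll archive events, send redirects and rejections,
    // and check the heartbeat/quorum conditions that can trigger a new election or termination.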
private int slowTickWork(final long nowMs, final long nowNs)
{
int workCount = aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
else if (ConsensusModule.State.CLOSED == state)
{
unexpectedTermination();
}
if (nowNs >= markFileUpdateDeadlineNs)
{
markFileUpdateDeadlineNs = nowNs + MARK_FILE_UPDATE_INTERVAL_NS;
markFile.updateActivityTimestamp(nowMs);
}
workCount += pollArchiveEvents();
workCount += sendRedirects(redirectSessions, nowNs);
workCount += sendRejections(rejectedSessions, nowNs);
if (null == election)
{
if (Cluster.Role.LEADER == role)
{
workCount += checkControlToggle(nowNs);
if (ConsensusModule.State.ACTIVE == state)
{
workCount += processPendingSessions(pendingSessions, nowMs, nowNs);
workCount += checkSessions(sessionByIdMap, nowNs);
workCount += processPassiveMembers(passiveMembers);
if (!ClusterMember.hasActiveQuorum(activeMembers, nowNs, leaderHeartbeatTimeoutNs))
{
ctx.countedErrorHandler().onError(new ClusterException("inactive follower quorum", WARN));
enterElection();
workCount += 1;
}
}
else if (ConsensusModule.State.TERMINATING == state)
{
if (clusterTermination.canTerminate(activeMembers, terminationPosition, nowNs))
{
recordingLog.commitLogPosition(leadershipTermId, terminationPosition);
closeAndTerminate();
}
}
}
else if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
if (nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatTimeoutNs) && NULL_POSITION == terminationPosition)
{
ctx.countedErrorHandler().onError(new ClusterException("leader heartbeat timeout", WARN));
enterElection();
workCount += 1;
}
}
}
return workCount;
}
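    // Per-duty-cycle consensus work: the leader polls timers, pending service messages and ingress and updates
    // the commit position; a follower polls the log up to the notified commit position and reports progress.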
private int consensusWork(final long timestamp, final long nowNs)
{
int workCount = 0;
if (Cluster.Role.LEADER == role)
{
if (ConsensusModule.State.ACTIVE == state)
{
workCount += timerService.poll(timestamp);
workCount += pendingServiceMessages.forEach(
pendingServiceMessageHeadOffset, serviceSessionMessageAppender, SERVICE_MESSAGE_LIMIT);
workCount += ingressAdapter.poll();
}
workCount += updateLeaderPosition(nowNs);
}
else
{
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
if (NULL_POSITION != terminationPosition && logAdapter.position() >= terminationPosition)
{
serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler());
state(ConsensusModule.State.TERMINATING);
}
else
{
final long limit = null != appendPosition ? appendPosition.get() : logRecordedPosition;
final int count = logAdapter.poll(min(notifiedCommitPosition, limit));
if (0 == count && logAdapter.isImageClosed())
{
ctx.countedErrorHandler().onError(new ClusterException("log disconnected from leader", WARN));
enterElection();
return 1;
}
commitPosition.proposeMaxOrdered(logAdapter.position());
workCount += ingressAdapter.poll();
workCount += count;
}
}
workCount += updateFollowerPosition(nowNs);
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
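    // Apply operator commands from the cluster control toggle: suspend, resume, snapshot, shutdown or abort.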
private int checkControlToggle(final long nowNs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND))
{
state(ConsensusModule.State.SUSPENDED);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT))
{
state(ConsensusModule.State.SNAPSHOT);
}
break;
case SHUTDOWN:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT))
{
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
final long position = logPublisher.position();
clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs());
clusterTermination.terminationPosition(
errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position);
terminationPosition = position;
state(ConsensusModule.State.SNAPSHOT);
}
break;
case ABORT:
if (ConsensusModule.State.ACTIVE == state)
{
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
final long position = logPublisher.position();
clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs());
clusterTermination.terminationPosition(
errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position);
terminationPosition = position;
serviceProxy.terminationPosition(terminationPosition, errorHandler);
state(ConsensusModule.State.TERMINATING);
}
break;
default:
return 0;
}
return 1;
}
private boolean appendAction(final ClusterAction action)
{
return logPublisher.appendClusterAction(leadershipTermId, clusterClock.time(), action);
}
private int processPendingSessions(
final ArrayList<ClusterSession> pendingSessions, final long nowMs, final long nowNs)
{
int workCount = 0;
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
ctx.timedOutClientCounter().incrementOrdered();
continue;
}
if (session.state() == INIT || session.state() == CONNECTED)
{
if (session.isResponsePublicationConnected())
{
session.state(CONNECTED);
authenticator.onConnectedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == CHALLENGED)
{
if (session.isResponsePublicationConnected())
{
authenticator.onChallengedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == AUTHENTICATED)
{
if (session.isBackupSession())
{
final RecordingLog.Entry entry = recordingLog.findLastTerm();
if (null != entry && consensusPublisher.backupResponse(
session,
commitPosition.id(),
leaderMember.id(),
entry,
recoveryPlan,
ClusterMember.encodeAsString(activeMembers)))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount += 1;
}
}
else if (appendSessionAndOpen(session, nowNs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
sessionByIdMap.put(session.id(), session);
workCount += 1;
}
}
else if (session.state() == REJECTED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
rejectedSessions.add(session);
}
}
return workCount;
}
private int sendRejections(final ArrayList<ClusterSession> rejectedSessions, final long nowNs)
{
int workCount = 0;
for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = rejectedSessions.get(i);
final String detail = session.responseDetail();
final EventCode eventCode = session.eventCode();
if (egressPublisher.sendEvent(session, leadershipTermId, leaderMember.id(), eventCode, detail) ||
nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount++;
}
}
return workCount;
}
private int sendRedirects(final ArrayList<ClusterSession> redirectSessions, final long nowNs)
{
int workCount = 0;
for (int lastIndex = redirectSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = redirectSessions.get(i);
final EventCode eventCode = EventCode.REDIRECT;
final int leaderId = leaderMember.id();
if (egressPublisher.sendEvent(session, leadershipTermId, leaderId, eventCode, ingressEndpoints) ||
nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(redirectSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount++;
}
}
return workCount;
}
private int processPassiveMembers(final ClusterMember[] passiveMembers)
{
int workCount = 0;
for (final ClusterMember member : passiveMembers)
{
if (member.correlationId() != NULL_VALUE)
{
if (consensusPublisher.clusterMemberChange(
member.publication(),
member.correlationId(),
leaderMember.id(),
ClusterMember.encodeAsString(activeMembers),
ClusterMember.encodeAsString(passiveMembers)))
{
member.correlationId(NULL_VALUE);
workCount++;
}
}
else if (member.hasRequestedJoin() && member.logPosition() == logPublisher.position())
{
final ClusterMember[] newMembers = ClusterMember.addMember(activeMembers, member);
final long now = clusterClock.time();
if (logPublisher.appendMembershipChangeEvent(
leadershipTermId,
now,
leaderMember.id(),
newMembers.length,
ChangeType.JOIN,
member.id(),
ClusterMember.encodeAsString(newMembers)) > 0)
{
timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs;
this.passiveMembers = ClusterMember.removeMember(this.passiveMembers, member.id());
activeMembers = newMembers;
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
member.hasRequestedJoin(false);
workCount++;
break;
}
}
}
return workCount;
}
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowNs)
{
int workCount = 0;
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
if (session.state() == OPEN)
{
session.closing(CloseReason.TIMEOUT);
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = session.closeReason().name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
i.remove();
session.close(ctx.countedErrorHandler());
ctx.timedOutClientCounter().incrementOrdered();
workCount++;
}
}
else if (session.state() == CLOSING)
{
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = session.closeReason().name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
i.remove();
session.close(ctx.countedErrorHandler());
if (session.closeReason() == CloseReason.TIMEOUT)
{
ctx.timedOutClientCounter().incrementOrdered();
}
workCount++;
}
}
else
{
i.remove();
session.close(ctx.countedErrorHandler());
workCount++;
}
}
else if (session.hasOpenEventPending())
{
workCount += sendSessionOpenEvent(session);
}
else if (session.hasNewLeaderEventPending())
{
workCount += sendNewLeaderEvent(session);
}
}
return workCount;
}
private void captureServiceAck(final long logPosition, final long ackId, final long relevantId, final int serviceId)
{
if (0 == ackId && NULL_VALUE != serviceClientIds[serviceId])
{
throw new ClusterException(
"initial ack already received from service: possible duplicate serviceId=" + serviceId);
}
serviceAckQueues[serviceId].offerLast(new ServiceAck(ackId, logPosition, relevantId));
}
private ServiceAck[] pollServiceAcks(final long logPosition, final int serviceId)
{
final ServiceAck[] serviceAcks = new ServiceAck[serviceAckQueues.length];
for (int id = 0, length = serviceAckQueues.length; id < length; id++)
{
final ServiceAck serviceAck = serviceAckQueues[id].pollFirst();
if (null == serviceAck || serviceAck.logPosition() != logPosition)
{
throw new ClusterException(
"invalid ack for serviceId=" + serviceId + " logPosition=" + logPosition + " " + serviceAck);
}
serviceAcks[id] = serviceAck;
}
return serviceAcks;
}
private int sendNewLeaderEvent(final ClusterSession session)
{
if (egressPublisher.newLeader(session, leadershipTermId, leaderMember.id(), ingressEndpoints))
{
session.hasNewLeaderEventPending(false);
return 1;
}
return 0;
}
private int sendSessionOpenEvent(final ClusterSession session)
{
if (egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.OK, ""))
{
session.hasOpenEventPending(false);
return 1;
}
return 0;
}
private boolean appendSessionAndOpen(final ClusterSession session, final long nowNs)
{
final long resultingPosition = logPublisher.appendSessionOpen(session, leadershipTermId, clusterClock.time());
if (resultingPosition > 0)
{
session.open(resultingPosition);
session.timeOfLastActivityNs(nowNs);
return true;
}
return false;
}
private void createAppendPosition(final int logSessionId)
{
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, logSessionId);
long registrationId;
while (0 == (registrationId = counters.getCounterRegistrationId(counterId)))
{
idle();
}
logRecordingId = RecordingPos.getRecordingId(counters, counterId);
appendPosition = new ReadableCounter(counters, registrationId, counterId);
logRecordedPosition = NULL_POSITION;
}
private void loadSnapshot(final RecordingLog.Snapshot snapshot, final AeronArchive archive)
{
final String channel = ctx.replayChannel();
final int streamId = ctx.replayStreamId();
final int sessionId = (int)archive.startReplay(snapshot.recordingId, 0, NULL_LENGTH, channel, streamId);
final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);
try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
{
final Image image = awaitImage(sessionId, subscription);
final ConsensusModuleSnapshotLoader snapshotLoader = new ConsensusModuleSnapshotLoader(image, this);
while (true)
{
final int fragments = snapshotLoader.poll();
if (fragments == 0)
{
if (snapshotLoader.isDone())
{
break;
}
if (image.isClosed())
{
throw new ClusterException("snapshot ended unexpectedly");
}
}
idle(fragments);
}
final int appVersion = snapshotLoader.appVersion();
if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion))
{
throw new ClusterException(
"incompatible version: " + SemanticVersion.toString(ctx.appVersion()) +
" snapshot=" + SemanticVersion.toString(appVersion));
}
final TimeUnit timeUnit = snapshotLoader.timeUnit();
if (timeUnit != clusterTimeUnit)
{
throw new ClusterException("incompatible time unit: " + clusterTimeUnit + " snapshot=" + timeUnit);
}
pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE);
}
timerService.currentTickTime(clusterClock.time());
leadershipTermId(snapshot.leadershipTermId);
commitPosition.setOrdered(snapshot.logPosition);
expectedAckPosition = snapshot.logPosition;
}
private Image awaitImage(final int sessionId, final Subscription subscription)
{
idleStrategy.reset();
Image image;
while ((image = subscription.imageBySessionId(sessionId)) == null)
{
idle();
}
return image;
}
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
final int snapshotsCount = plan.snapshots.size();
if (snapshotsCount > 0)
{
final long[] serviceSnapshotRecordingIds = new long[snapshotsCount - 1];
final RecordingLog.Snapshot snapshot = plan.snapshots.get(0);
for (int i = 1; i < snapshotsCount; i++)
{
final RecordingLog.Snapshot serviceSnapshot = plan.snapshots.get(i);
serviceSnapshotRecordingIds[serviceSnapshot.serviceId] = serviceSnapshot.recordingId;
}
return RecoveryState.allocate(
aeron,
snapshot.leadershipTermId,
snapshot.logPosition,
snapshot.timestamp,
ctx.clusterId(),
serviceSnapshotRecordingIds);
}
return RecoveryState.allocate(aeron, leadershipTermId, 0, 0, ctx.clusterId());
}
private DynamicJoin requiresDynamicJoin()
{
if (0 == activeMembers.length && null != ctx.clusterConsensusEndpoints())
{
return new DynamicJoin(ctx.clusterConsensusEndpoints(), archive, consensusPublisher, ctx, this);
}
return null;
}
private void captureServiceClientIds()
{
for (int i = 0, length = serviceClientIds.length; i < length; i++)
{
final ServiceAck serviceAck = serviceAckQueues[i].pollFirst();
serviceClientIds[i] = Objects.requireNonNull(serviceAck).relevantId();
}
}
private void handleMemberRemovals(final long commitPosition)
{
ClusterMember[] members = activeMembers;
for (final ClusterMember member : activeMembers)
{
if (member.hasRequestedRemove() && member.removalPosition() <= commitPosition)
{
if (member.id() == memberId)
{
state(ConsensusModule.State.QUITTING);
}
members = ClusterMember.removeMember(members, member.id());
clusterMemberByIdMap.remove(member.id());
clusterMemberByIdMap.compact();
member.closePublication(ctx.countedErrorHandler());
logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint());
pendingMemberRemovals--;
}
}
activeMembers = members;
rankedPositions = new long[ClusterMember.quorumThreshold(members.length)];
}
private int updateLeaderPosition(final long nowNs)
{
if (null != appendPosition)
{
return updateLeaderPosition(nowNs, appendPosition.get());
}
return 0;
}
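    // Publish the commit position reached by a quorum of members, sweep committed entries and complete any
    // pending member removals.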
int updateLeaderPosition(final long nowNs, final long position)
{
thisMember.logPosition(position).timeOfLastAppendPositionNs(nowNs);
final long commitPosition = min(quorumPosition(activeMembers, rankedPositions), position);
if (commitPosition > this.commitPosition.getWeak() ||
nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatIntervalNs))
{
for (final ClusterMember member : activeMembers)
{
if (member.id() != memberId)
{
consensusPublisher.commitPosition(
member.publication(), leadershipTermId, commitPosition, memberId);
}
}
this.commitPosition.setOrdered(commitPosition);
timeOfLastLogUpdateNs = nowNs;
clearUncommittedEntriesTo(commitPosition);
if (pendingMemberRemovals > 0)
{
handleMemberRemovals(commitPosition);
}
return 1;
}
return 0;
}
LogReplication newLogReplication(
final String leaderArchiveEndpoint, final long leaderRecordingId, final long stopPosition, final long nowNs)
{
return new LogReplication(
archive,
leaderRecordingId,
logRecordingId,
stopPosition,
leaderArchiveEndpoint,
ctx.replicationChannel(),
ctx.leaderHeartbeatTimeoutNs(),
ctx.leaderHeartbeatIntervalNs(),
nowNs);
}
private int updateFollowerPosition(final long nowNs)
{
final long recordedPosition = null != appendPosition ? appendPosition.get() : logRecordedPosition;
final long position = Math.max(recordedPosition, lastAppendPosition);
if ((recordedPosition > lastAppendPosition ||
nowNs >= (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) &&
consensusPublisher.appendPosition(leaderMember.publication(), leadershipTermId, position, memberId))
{
lastAppendPosition = position;
timeOfLastAppendPositionNs = nowNs;
return 1;
}
return 0;
}
private void clearSessionsAfter(final long logPosition)
{
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
if (session.openedLogPosition() > logPosition)
{
i.remove();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election");
session.close(ctx.countedErrorHandler());
}
}
for (final ClusterSession session : pendingSessions)
{
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election");
session.close(ctx.countedErrorHandler());
}
pendingSessions.clear();
}
private void clearUncommittedEntriesTo(final long commitPosition)
{
if (uncommittedServiceMessages > 0)
{
pendingServiceMessageHeadOffset -= pendingServiceMessages.consume(
leaderServiceSessionMessageSweeper, Integer.MAX_VALUE);
}
while (uncommittedTimers.peekLong() <= commitPosition)
{
uncommittedTimers.pollLong();
uncommittedTimers.pollLong();
}
while (true)
{
final ClusterSession clusterSession = uncommittedClosedSessions.peekFirst();
if (null == clusterSession || clusterSession.closedLogPosition() > commitPosition)
{
break;
}
uncommittedClosedSessions.pollFirst();
}
}
private void restoreUncommittedEntries(final long commitPosition)
{
for (final LongArrayQueue.LongIterator i = uncommittedTimers.iterator(); i.hasNext(); )
{
final long appendPosition = i.nextValue();
final long correlationId = i.nextValue();
if (appendPosition > commitPosition)
{
timerService.scheduleTimerForCorrelationId(correlationId, timerService.currentTickTime());
}
}
uncommittedTimers.clear();
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
pendingServiceMessageHeadOffset = 0;
if (uncommittedServiceMessages > 0)
{
pendingServiceMessages.consume(leaderServiceSessionMessageSweeper, Integer.MAX_VALUE);
pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE);
uncommittedServiceMessages = 0;
}
ClusterSession session;
while (null != (session = uncommittedClosedSessions.pollFirst()))
{
if (session.closedLogPosition() > commitPosition)
{
session.closedLogPosition(NULL_POSITION);
session.state(CLOSING);
sessionByIdMap.put(session.id(), session);
}
}
}
private void enterElection()
{
role(Cluster.Role.FOLLOWER);
election = new Election(
false,
leadershipTermId,
commitPosition.getWeak(),
null != appendPosition ? appendPosition.get() : recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
}
private void idle()
{
checkInterruptStatus();
aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
idleStrategy.idle();
pollArchiveEvents();
}
private void idle(final int workCount)
{
checkInterruptStatus();
aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
idleStrategy.idle(workCount);
if (0 == workCount)
{
pollArchiveEvents();
}
}
private static void checkInterruptStatus()
{
if (Thread.currentThread().isInterrupted())
{
throw new AgentTerminationException("interrupted");
}
}
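    // Record a consensus module snapshot to the archive and append snapshot entries for the module and each
    // service to the recording log before refreshing the recovery plan.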
private void takeSnapshot(final long timestamp, final long logPosition, final ServiceAck[] serviceAcks)
{
final long recordingId;
try (ExclusivePublication publication = aeron.addExclusivePublication(
ctx.snapshotChannel(), ctx.snapshotStreamId()))
{
final String channel = ChannelUri.addSessionId(ctx.snapshotChannel(), publication.sessionId());
archive.startRecording(channel, ctx.snapshotStreamId(), LOCAL, true);
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, publication.sessionId());
recordingId = RecordingPos.getRecordingId(counters, counterId);
snapshotState(publication, logPosition, replayLeadershipTermId);
awaitRecordingComplete(recordingId, publication.position(), counters, counterId);
}
catch (final ArchiveException ex)
{
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
throw ex;
}
final long termBaseLogPosition = recordingLog.getTermEntry(replayLeadershipTermId).termBaseLogPosition;
for (int serviceId = serviceAcks.length - 1; serviceId >= 0; serviceId--)
{
final long snapshotId = serviceAcks[serviceId].relevantId();
recordingLog.appendSnapshot(
snapshotId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, serviceId);
}
recordingLog.appendSnapshot(
recordingId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, SERVICE_ID);
recordingLog.force(ctx.fileSyncLevel());
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), Aeron.NULL_VALUE);
ctx.snapshotCounter().incrementOrdered();
final long nowNs = clusterClock.timeNanos();
for (final ClusterSession session : sessionByIdMap.values())
{
session.timeOfLastActivityNs(nowNs);
}
}
private void awaitRecordingComplete(
final long recordingId, final long position, final CountersReader counters, final int counterId)
{
idleStrategy.reset();
while (counters.getCounterValue(counterId) < position)
{
idle();
if (!RecordingPos.isActive(counters, counterId, recordingId))
{
throw new ClusterException("recording has stopped unexpectedly: " + recordingId);
}
}
}
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
idleStrategy.reset();
int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
while (CountersReader.NULL_COUNTER_ID == counterId)
{
idle();
counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
}
return counterId;
}
private void snapshotState(
final ExclusivePublication publication, final long logPosition, final long leadershipTermId)
{
final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
publication, idleStrategy, aeronClientInvoker);
snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion());
snapshotTaker.snapshotConsensusModuleState(
nextSessionId, nextServiceSessionId, logServiceSessionId, pendingServiceMessages.size());
snapshotTaker.snapshotClusterMembers(memberId, highMemberId, activeMembers);
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN || session.state() == CLOSED)
{
snapshotTaker.snapshotSession(session);
}
}
timerService.snapshot(snapshotTaker);
snapshotTaker.snapshot(pendingServiceMessages);
snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion());
}
private void clusterMemberJoined(final int memberId, final ClusterMember[] newMembers)
{
highMemberId = Math.max(highMemberId, memberId);
final ClusterMember eventMember = ClusterMember.findMember(newMembers, memberId);
if (null != eventMember)
{
if (null == eventMember.publication())
{
ClusterMember.addConsensusPublication(
eventMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
}
activeMembers = ClusterMember.addMember(activeMembers, eventMember);
clusterMemberByIdMap.put(memberId, eventMember);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
}
}
private void clusterMemberQuit(final int memberId)
{
activeMembers = ClusterMember.removeMember(activeMembers, memberId);
clusterMemberByIdMap.remove(memberId);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
}
private void onUnavailableIngressImage(final Image image)
{
ingressAdapter.freeSessionBuffer(image.sessionId());
}
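    // Pending service messages carry a generated cluster session id and a Long.MAX_VALUE timestamp as a
    // "not yet appended" marker until the appender writes the real append position into that slot.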
private void enqueueServiceSessionMessage(
final MutableDirectBuffer buffer, final int offset, final int length, final long clusterSessionId)
{
final int headerOffset = offset - SessionMessageHeaderDecoder.BLOCK_LENGTH;
final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset();
buffer.putLong(clusterSessionIdOffset, clusterSessionId, SessionMessageHeaderDecoder.BYTE_ORDER);
buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderDecoder.BYTE_ORDER);
if (!pendingServiceMessages.append(buffer, offset - SESSION_HEADER_LENGTH, length + SESSION_HEADER_LENGTH))
{
throw new ClusterException("pending service message buffer capacity: " + pendingServiceMessages.size());
}
}
private boolean serviceSessionMessageAppender(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int headerOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH;
final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long clusterSessionId = buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
final long appendPosition = logPublisher.appendMessage(
leadershipTermId,
clusterSessionId,
clusterClock.time(),
buffer,
offset + SESSION_HEADER_LENGTH,
length - SESSION_HEADER_LENGTH);
if (appendPosition > 0)
{
++uncommittedServiceMessages;
logServiceSessionId = clusterSessionId;
pendingServiceMessageHeadOffset = headOffset;
buffer.putLong(timestampOffset, appendPosition, SessionMessageHeaderEncoder.BYTE_ORDER);
return true;
}
return false;
}
private boolean serviceSessionMessageReset(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int timestampOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
if (appendPosition < Long.MAX_VALUE)
{
buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderEncoder.BYTE_ORDER);
return true;
}
return false;
}
private boolean leaderServiceSessionMessageSweeper(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int timestampOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
if (appendPosition <= commitPosition.getWeak())
{
--uncommittedServiceMessages;
return true;
}
return false;
}
private boolean followerServiceSessionMessageSweeper(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int clusterSessionIdOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
return buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER) <= logServiceSessionId;
}
private void onUnavailableCounter(final CountersReader counters, final long registrationId, final int counterId)
{
if (ConsensusModule.State.TERMINATING != state && ConsensusModule.State.QUITTING != state)
{
for (final long clientId : serviceClientIds)
{
if (registrationId == clientId)
{
ctx.countedErrorHandler().onError(new ClusterException(
"Aeron client in service closed unexpectedly", WARN));
state(ConsensusModule.State.CLOSED);
return;
}
}
if (null != appendPosition && appendPosition.registrationId() == registrationId)
{
appendPosition = null;
logSubscriptionId = NULL_VALUE;
if (null != election)
{
election.handleError(
clusterClock.timeNanos(), new ClusterException("log recording ended unexpectedly", WARN));
}
else if (NULL_POSITION == terminationPosition)
{
ctx.countedErrorHandler().onError(new ClusterException("log recording ended unexpectedly", WARN));
enterElection();
}
}
}
}
private void closeAndTerminate()
{
tryStopLogRecording();
state(ConsensusModule.State.CLOSED);
terminateAgent();
}
private void unexpectedTermination()
{
aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId);
serviceProxy.terminationPosition(0, ctx.countedErrorHandler());
tryStopLogRecording();
state(ConsensusModule.State.CLOSED);
terminateAgent();
}
private void terminateAgent()
{
try
{
ctx.terminationHook().run();
}
catch (final Throwable ex)
{
ctx.countedErrorHandler().onError(ex);
}
throw new ClusterTerminationException();
}
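    /**
     * Best-effort stop of the log recording, first by subscription id (while the archive publication is still
     * connected) and then by recording identity, reporting failures via the counted error handler.
     */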
private void tryStopLogRecording()
{
appendPosition = null;
if (NULL_VALUE != logSubscriptionId && archive.archiveProxy().publication().isConnected())
{
try
{
archive.tryStopRecording(logSubscriptionId);
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
}
logSubscriptionId = NULL_VALUE;
}
if (NULL_VALUE != logRecordingId)
{
try
{
archive.tryStopRecordingByIdentity(logRecordingId);
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
}
}
}
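    /**
     * Polls the archive until a valid stop position is available for the log recording and returns it,
     * idling between attempts.
     */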
private long getLastAppendedPosition()
{
idleStrategy.reset();
while (true)
{
final long appendPosition = archive.getStopPosition(logRecordingId);
if (NULL_POSITION != appendPosition)
{
return appendPosition;
}
idle();
}
}
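    /**
     * After a dynamic join, appends the term entry for the latest retrieved snapshot and then each snapshot
     * (iterated in reverse) to the recording log, clearing the retained snapshot list afterwards.
     */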
private void appendDynamicJoinTermAndSnapshots()
{
if (!dynamicJoinSnapshots.isEmpty())
{
final RecordingLog.Snapshot lastSnapshot = dynamicJoinSnapshots.get(dynamicJoinSnapshots.size() - 1);
recordingLog.appendTerm(
logRecordingId,
lastSnapshot.leadershipTermId,
lastSnapshot.termBaseLogPosition,
lastSnapshot.timestamp);
for (int i = dynamicJoinSnapshots.size() - 1; i >= 0; i--)
{
final RecordingLog.Snapshot snapshot = dynamicJoinSnapshots.get(i);
recordingLog.appendSnapshot(
snapshot.recordingId,
snapshot.leadershipTermId,
snapshot.termBaseLogPosition,
snapshot.logPosition,
snapshot.timestamp,
snapshot.serviceId);
}
dynamicJoinSnapshots.clear();
}
}
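    /**
     * Connects the ingress subscription: when the configured ingress channel has no explicit endpoint this
     * member's ingress endpoint is used, otherwise only the leader subscribes to the shared channel.
     */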
private void connectIngress()
{
if (!ctx.ingressChannel().contains(ENDPOINT_PARAM_NAME))
{
final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.ingressEndpoint());
ingressAdapter.connect(aeron.addSubscription(
ingressUri.toString(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
}
else if (Cluster.Role.LEADER == role)
{
ingressAdapter.connect(aeron.addSubscription(
ctx.ingressChannel(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
}
}
public String toString()
{
return "ConsensusModuleAgent{" +
"election=" + election +
'}';
}
}
| aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java | /*
* Copyright 2014-2021 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.client.ArchiveException;
import io.aeron.archive.client.RecordingSignalPoller;
import io.aeron.archive.codecs.*;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.client.AeronCluster;
import io.aeron.cluster.client.ClusterException;
import io.aeron.cluster.codecs.MessageHeaderDecoder;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.exceptions.AeronException;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.security.Authenticator;
import io.aeron.status.ReadableCounter;
import org.agrona.*;
import org.agrona.collections.*;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static io.aeron.Aeron.NULL_VALUE;
import static io.aeron.CommonContext.*;
import static io.aeron.archive.client.AeronArchive.NULL_LENGTH;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.archive.client.ReplayMerge.LIVE_ADD_MAX_WINDOW;
import static io.aeron.archive.codecs.SourceLocation.LOCAL;
import static io.aeron.cluster.ClusterMember.quorumPosition;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.*;
import static io.aeron.cluster.client.AeronCluster.SESSION_HEADER_LENGTH;
import static io.aeron.cluster.service.ClusteredServiceContainer.Configuration.MARK_FILE_UPDATE_INTERVAL_NS;
import static io.aeron.exceptions.AeronException.Category.WARN;
import static java.lang.Math.min;
import static org.agrona.BitUtil.findNextPositivePowerOfTwo;
final class ConsensusModuleAgent implements Agent
{
static final long SLOW_TICK_INTERVAL_NS = TimeUnit.MILLISECONDS.toNanos(10);
private static final int SERVICE_MESSAGE_LIMIT = 20;
private final long sessionTimeoutNs;
private final long leaderHeartbeatIntervalNs;
private final long leaderHeartbeatTimeoutNs;
private long unavailableCounterHandlerRegistrationId;
private long nextSessionId = 1;
private long nextServiceSessionId = Long.MIN_VALUE + 1;
private long logServiceSessionId = Long.MIN_VALUE;
private long leadershipTermId = NULL_VALUE;
private long replayLeadershipTermId = NULL_VALUE;
private long expectedAckPosition = 0;
private long serviceAckId = 0;
private long terminationPosition = NULL_POSITION;
private long notifiedCommitPosition = 0;
private long lastAppendPosition = 0;
private long timeOfLastLogUpdateNs = 0;
private long timeOfLastAppendPositionNs = 0;
private long slowTickDeadlineNs = 0;
private long markFileUpdateDeadlineNs = 0;
private int pendingServiceMessageHeadOffset = 0;
private int uncommittedServiceMessages = 0;
private int memberId;
private int highMemberId;
private int pendingMemberRemovals = 0;
private long logPublicationChannelTag;
private ReadableCounter appendPosition = null;
private final Counter commitPosition;
private ConsensusModule.State state = ConsensusModule.State.INIT;
private Cluster.Role role = Cluster.Role.FOLLOWER;
private ClusterMember[] activeMembers;
private ClusterMember[] passiveMembers = ClusterMember.EMPTY_MEMBERS;
private ClusterMember leaderMember;
private ClusterMember thisMember;
private long[] rankedPositions;
private final long[] serviceClientIds;
private final ArrayDeque<ServiceAck>[] serviceAckQueues;
private final Counter clusterRoleCounter;
private final ClusterMarkFile markFile;
private final AgentInvoker aeronClientInvoker;
private final ClusterClock clusterClock;
private final TimeUnit clusterTimeUnit;
private final Counter moduleState;
private final Counter controlToggle;
private final TimerService timerService;
private final ConsensusModuleAdapter consensusModuleAdapter;
private final ServiceProxy serviceProxy;
private final IngressAdapter ingressAdapter;
private final EgressPublisher egressPublisher;
private final LogPublisher logPublisher;
private final LogAdapter logAdapter;
private final ConsensusAdapter consensusAdapter;
private final ConsensusPublisher consensusPublisher = new ConsensusPublisher();
private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
private final ArrayList<ClusterSession> redirectSessions = new ArrayList<>();
private final Int2ObjectHashMap<ClusterMember> clusterMemberByIdMap = new Int2ObjectHashMap<>();
private final Long2LongCounterMap expiredTimerCountByCorrelationIdMap = new Long2LongCounterMap(0);
private final ArrayDeque<ClusterSession> uncommittedClosedSessions = new ArrayDeque<>();
private final LongArrayQueue uncommittedTimers = new LongArrayQueue(Long.MAX_VALUE);
private final ExpandableRingBuffer pendingServiceMessages = new ExpandableRingBuffer();
private final ExpandableRingBuffer.MessageConsumer serviceSessionMessageAppender =
this::serviceSessionMessageAppender;
private final ExpandableRingBuffer.MessageConsumer leaderServiceSessionMessageSweeper =
this::leaderServiceSessionMessageSweeper;
private final ExpandableRingBuffer.MessageConsumer followerServiceSessionMessageSweeper =
this::followerServiceSessionMessageSweeper;
private final Authenticator authenticator;
private final ClusterSessionProxy sessionProxy;
private final Aeron aeron;
private final ConsensusModule.Context ctx;
private final IdleStrategy idleStrategy;
private final RecordingLog recordingLog;
private final ArrayList<RecordingLog.Snapshot> dynamicJoinSnapshots = new ArrayList<>();
private RecordingLog.RecoveryPlan recoveryPlan;
private AeronArchive archive;
private RecordingSignalPoller recordingSignalPoller;
private Election election;
private DynamicJoin dynamicJoin;
private ClusterTermination clusterTermination;
private long logSubscriptionId = NULL_VALUE;
private long logRecordingId = NULL_VALUE;
private long logRecordedPosition = NULL_POSITION;
private String liveLogDestination;
private String catchupLogDestination;
private String ingressEndpoints;
ConsensusModuleAgent(final ConsensusModule.Context ctx)
{
this.ctx = ctx;
this.aeron = ctx.aeron();
this.clusterClock = ctx.clusterClock();
this.clusterTimeUnit = clusterClock.timeUnit();
this.sessionTimeoutNs = ctx.sessionTimeoutNs();
this.leaderHeartbeatIntervalNs = ctx.leaderHeartbeatIntervalNs();
this.leaderHeartbeatTimeoutNs = ctx.leaderHeartbeatTimeoutNs();
this.egressPublisher = ctx.egressPublisher();
this.moduleState = ctx.moduleStateCounter();
this.commitPosition = ctx.commitPositionCounter();
this.controlToggle = ctx.controlToggleCounter();
this.logPublisher = ctx.logPublisher();
this.idleStrategy = ctx.idleStrategy();
this.timerService = new TimerService(
this,
clusterTimeUnit,
0,
findNextPositivePowerOfTwo(clusterTimeUnit.convert(ctx.wheelTickResolutionNs(), TimeUnit.NANOSECONDS)),
ctx.ticksPerWheel());
this.activeMembers = ClusterMember.parse(ctx.clusterMembers());
this.sessionProxy = new ClusterSessionProxy(egressPublisher);
this.memberId = ctx.clusterMemberId();
this.clusterRoleCounter = ctx.clusterNodeRoleCounter();
this.markFile = ctx.clusterMarkFile();
this.recordingLog = ctx.recordingLog();
this.serviceClientIds = new long[ctx.serviceCount()];
Arrays.fill(serviceClientIds, NULL_VALUE);
this.serviceAckQueues = ServiceAck.newArray(ctx.serviceCount());
this.highMemberId = ClusterMember.highMemberId(activeMembers);
aeronClientInvoker = aeron.conductorAgentInvoker();
aeronClientInvoker.invoke();
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
role(Cluster.Role.FOLLOWER);
ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap);
thisMember = ClusterMember.determineMember(activeMembers, ctx.clusterMemberId(), ctx.memberEndpoints());
leaderMember = thisMember;
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
if (!consensusUri.containsKey(ENDPOINT_PARAM_NAME))
{
consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint());
}
consensusAdapter = new ConsensusAdapter(
aeron.addSubscription(consensusUri.toString(), ctx.consensusStreamId()), this);
ClusterMember.addConsensusPublications(activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron);
ingressAdapter = new IngressAdapter(ctx.ingressFragmentLimit(), this);
logAdapter = new LogAdapter(this, ctx.logFragmentLimit());
consensusModuleAdapter = new ConsensusModuleAdapter(
aeron.addSubscription(ctx.controlChannel(), ctx.consensusModuleStreamId()), this);
serviceProxy = new ServiceProxy(aeron.addPublication(ctx.controlChannel(), ctx.serviceStreamId()));
authenticator = ctx.authenticatorSupplier().get();
}
/**
* {@inheritDoc}
*/
public void onClose()
{
if (!aeron.isClosed())
{
aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId);
tryStopLogRecording();
if (!ctx.ownsAeronClient())
{
logPublisher.disconnect(ctx.countedErrorHandler());
logAdapter.disconnect(ctx.countedErrorHandler());
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
for (final ClusterSession session : sessionByIdMap.values())
{
session.close(errorHandler);
}
CloseHelper.close(errorHandler, ingressAdapter);
ClusterMember.closeConsensusPublications(errorHandler, activeMembers);
CloseHelper.close(errorHandler, consensusAdapter);
CloseHelper.close(errorHandler, serviceProxy);
CloseHelper.close(errorHandler, consensusModuleAdapter);
CloseHelper.close(errorHandler, archive);
}
state(ConsensusModule.State.CLOSED);
}
markFile.updateActivityTimestamp(NULL_VALUE);
ctx.close();
}
/**
* {@inheritDoc}
*/
public void onStart()
{
archive = AeronArchive.connect(ctx.archiveContext().clone());
recordingSignalPoller = new RecordingSignalPoller(
archive.controlSessionId(), archive.controlResponsePoller().subscription());
if (null == (dynamicJoin = requiresDynamicJoin()))
{
final long lastTermRecordingId = recordingLog.findLastTermRecordingId();
if (NULL_VALUE != lastTermRecordingId)
{
archive.tryStopRecordingByIdentity(lastTermRecordingId);
}
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
if (null != recoveryPlan.log)
{
logRecordingId = recoveryPlan.log.recordingId;
}
try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
{
if (!recoveryPlan.snapshots.isEmpty())
{
loadSnapshot(recoveryPlan.snapshots.get(0), archive);
}
while (!ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues))
{
idle(consensusModuleAdapter.poll());
}
captureServiceClientIds();
++serviceAckId;
}
election = new Election(
true,
recoveryPlan.lastLeadershipTermId,
commitPosition.getWeak(),
recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
state(ConsensusModule.State.ACTIVE);
}
unavailableCounterHandlerRegistrationId = aeron.addUnavailableCounterHandler(this::onUnavailableCounter);
}
/**
* {@inheritDoc}
*/
public int doWork()
{
int workCount = 0;
final long now = clusterClock.time();
final long nowNs = clusterTimeUnit.toNanos(now);
if (nowNs >= slowTickDeadlineNs)
{
slowTickDeadlineNs = nowNs + SLOW_TICK_INTERVAL_NS;
workCount += slowTickWork(clusterTimeUnit.toMillis(now), nowNs);
}
workCount += consensusAdapter.poll();
if (null != dynamicJoin)
{
workCount += dynamicJoin.doWork(nowNs);
}
else if (null != election)
{
workCount += election.doWork(nowNs);
}
else
{
workCount += consensusWork(now, nowNs);
}
return workCount;
}
/**
* {@inheritDoc}
*/
public String roleName()
{
return "consensus-module_" + ctx.clusterId() + "_" + memberId;
}
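    /**
     * Handles a session connect request from ingress: followers redirect the client to the leader, while the
     * leader validates the protocol version and concurrent session limit before passing the credentials to
     * the authenticator.
     */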
void onSessionConnect(
final long correlationId,
final int responseStreamId,
final int version,
final String responseChannel,
final byte[] encodedCredentials)
{
final long clusterSessionId = Cluster.Role.LEADER == role ? nextSessionId++ : NULL_VALUE;
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.connect(aeron);
final long now = clusterClock.time();
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
if (Cluster.Role.LEADER != role)
{
redirectSessions.add(session);
}
else
{
if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version))
{
final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) +
", cluster is " + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION);
session.reject(EventCode.ERROR, detail);
rejectedSessions.add(session);
}
else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions())
{
session.reject(EventCode.ERROR, SESSION_LIMIT_MSG);
rejectedSessions.add(session);
}
else
{
authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now));
pendingSessions.add(session);
}
}
}
void onSessionClose(final long leadershipTermId, final long clusterSessionId)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
session.closing(CloseReason.CLIENT_ACTION);
session.disconnect(ctx.countedErrorHandler());
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
sessionByIdMap.remove(clusterSessionId);
session.close(ctx.countedErrorHandler());
}
}
}
}
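    /**
     * Appends an ingress session message to the log when acting as leader for the current term, aborting the
     * fragment for redelivery when the append is back-pressured.
     */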
ControlledFragmentAssembler.Action onIngressMessage(
final long leadershipTermId,
final long clusterSessionId,
final DirectBuffer buffer,
final int offset,
final int length)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
final long now = clusterClock.time();
if (logPublisher.appendMessage(leadershipTermId, clusterSessionId, now, buffer, offset, length) > 0)
{
session.timeOfLastActivityNs(clusterTimeUnit.toNanos(now));
return ControlledFragmentHandler.Action.CONTINUE;
}
else
{
return ControlledFragmentHandler.Action.ABORT;
}
}
}
return ControlledFragmentHandler.Action.CONTINUE;
}
void onSessionKeepAlive(final long leadershipTermId, final long clusterSessionId)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
session.timeOfLastActivityNs(clusterTimeUnit.toNanos(clusterClock.time()));
}
}
}
void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] encodedCredentials)
{
if (Cluster.Role.LEADER == role)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long now = clusterClock.time();
final long nowMs = clusterTimeUnit.toMillis(now);
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
authenticator.onChallengeResponse(clusterSessionId, encodedCredentials, nowMs);
break;
}
}
}
}
boolean onTimerEvent(final long correlationId)
{
final long appendPosition = logPublisher.appendTimer(correlationId, leadershipTermId, clusterClock.time());
if (appendPosition > 0)
{
uncommittedTimers.offerLong(appendPosition);
uncommittedTimers.offerLong(correlationId);
return true;
}
return false;
}
void onCanvassPosition(
final long logLeadershipTermId,
final long logPosition,
final long leadershipTermId,
final int followerMemberId)
{
if (null != election)
{
election.onCanvassPosition(logLeadershipTermId, logPosition, leadershipTermId, followerMemberId);
}
else if (Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower && logLeadershipTermId <= this.leadershipTermId)
{
final RecordingLog.Entry currentTermEntry = recordingLog.getTermEntry(this.leadershipTermId);
final long termBaseLogPosition = currentTermEntry.termBaseLogPosition;
final long timestamp = ctx.clusterClock().timeNanos();
final long nextLogLeadershipTermId;
final long nextTermBaseLogPosition;
final long nextLogPosition;
if (logLeadershipTermId < this.leadershipTermId)
{
final RecordingLog.Entry nextLogEntry = recordingLog.findTermEntry(logLeadershipTermId + 1);
nextLogLeadershipTermId = null != nextLogEntry ?
nextLogEntry.leadershipTermId : this.leadershipTermId;
nextTermBaseLogPosition = null != nextLogEntry ?
nextLogEntry.termBaseLogPosition : termBaseLogPosition;
nextLogPosition = null != nextLogEntry ? nextLogEntry.logPosition : NULL_POSITION;
}
else
{
nextLogLeadershipTermId = NULL_VALUE;
nextTermBaseLogPosition = NULL_POSITION;
nextLogPosition = NULL_POSITION;
}
final long appendPosition = logPublisher.position();
consensusPublisher.newLeadershipTerm(
follower.publication(),
logLeadershipTermId,
nextLogLeadershipTermId,
nextTermBaseLogPosition,
nextLogPosition,
this.leadershipTermId,
termBaseLogPosition,
appendPosition,
logRecordingId,
timestamp,
memberId,
logPublisher.sessionId(),
false);
}
}
}
void onRequestVote(
final long logLeadershipTermId, final long logPosition, final long candidateTermId, final int candidateId)
{
if (null != election)
{
election.onRequestVote(logLeadershipTermId, logPosition, candidateTermId, candidateId);
}
else if (candidateTermId > leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected vote request", WARN));
enterElection();
}
}
void onVote(
final long candidateTermId,
final long logLeadershipTermId,
final long logPosition,
final int candidateMemberId,
final int followerMemberId,
final boolean vote)
{
if (null != election)
{
election.onVote(
candidateTermId, logLeadershipTermId, logPosition, candidateMemberId, followerMemberId, vote);
}
}
void onNewLeadershipTerm(
final long logLeadershipTermId,
final long nextLeadershipTermId,
final long nextTermBaseLogPosition,
final long nextLogPosition,
final long leadershipTermId,
final long termBaseLogPosition,
final long logPosition,
final long leaderRecordingId,
final long timestamp,
final int leaderId,
final int logSessionId,
final boolean isStartup)
{
if (null != election)
{
election.onNewLeadershipTerm(
logLeadershipTermId,
nextLeadershipTermId,
nextTermBaseLogPosition,
nextLogPosition,
leadershipTermId,
termBaseLogPosition,
logPosition,
leaderRecordingId,
timestamp,
leaderId,
logSessionId,
isStartup);
}
else if (Cluster.Role.FOLLOWER == role &&
leadershipTermId == this.leadershipTermId &&
leaderId == leaderMember.id())
{
notifiedCommitPosition = Math.max(notifiedCommitPosition, logPosition);
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected new leadership term", WARN));
enterElection();
}
}
void onAppendPosition(final long leadershipTermId, final long logPosition, final int followerMemberId)
{
if (null != election)
{
election.onAppendPosition(leadershipTermId, logPosition, followerMemberId);
}
else if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower)
{
follower
.logPosition(logPosition)
.timeOfLastAppendPositionNs(clusterClock.timeNanos());
trackCatchupCompletion(follower, leadershipTermId);
}
}
}
void onCommitPosition(final long leadershipTermId, final long logPosition, final int leaderMemberId)
{
if (null != election)
{
election.onCommitPosition(leadershipTermId, logPosition, leaderMemberId);
}
else if (leadershipTermId == this.leadershipTermId &&
leaderMemberId == leaderMember.id() &&
Cluster.Role.FOLLOWER == role)
{
notifiedCommitPosition = logPosition;
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
else if (leadershipTermId > this.leadershipTermId && null == dynamicJoin)
{
ctx.countedErrorHandler().onError(new ClusterException("unexpected commit position", WARN));
enterElection();
}
}
void onCatchupPosition(
final long leadershipTermId, final long logPosition, final int followerMemberId, final String catchupEndpoint)
{
if (leadershipTermId <= this.leadershipTermId && Cluster.Role.LEADER == role)
{
final ClusterMember follower = clusterMemberByIdMap.get(followerMemberId);
if (null != follower && follower.catchupReplaySessionId() == NULL_VALUE)
{
final String channel = new ChannelUriStringBuilder()
.media(CommonContext.UDP_MEDIA)
.endpoint(catchupEndpoint)
.sessionId(logPublisher.sessionId())
.linger(0L)
.eos(Boolean.FALSE)
.build();
follower.catchupReplaySessionId(archive.startReplay(
logRecordingId, logPosition, Long.MAX_VALUE, channel, ctx.logStreamId()));
follower.catchupReplayCorrelationId(archive.lastCorrelationId());
}
}
}
void onStopCatchup(final long leadershipTermId, final int followerMemberId)
{
if (leadershipTermId == this.replayLeadershipTermId && followerMemberId == memberId)
{
if (null != catchupLogDestination)
{
logAdapter.removeDestination(catchupLogDestination);
catchupLogDestination = null;
}
}
}
void onAddPassiveMember(final long correlationId, final String memberEndpoints)
{
if (null == election && null == dynamicJoin)
{
if (Cluster.Role.LEADER == role)
{
if (ClusterMember.notDuplicateEndpoint(passiveMembers, memberEndpoints))
{
final ClusterMember newMember = ClusterMember.parseEndpoints(++highMemberId, memberEndpoints);
newMember.correlationId(correlationId);
passiveMembers = ClusterMember.addMember(passiveMembers, newMember);
clusterMemberByIdMap.put(newMember.id(), newMember);
ClusterMember.addConsensusPublication(
newMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
logPublisher.addDestination(ctx.isLogMdc(), newMember.logEndpoint());
}
}
else if (Cluster.Role.FOLLOWER == role)
{
consensusPublisher.addPassiveMember(leaderMember.publication(), correlationId, memberEndpoints);
}
}
}
void onClusterMembersChange(
final long correlationId, final int leaderMemberId, final String activeMembers, final String passiveMembers)
{
if (null != dynamicJoin)
{
dynamicJoin.onClusterMembersChange(correlationId, leaderMemberId, activeMembers, passiveMembers);
}
}
void onSnapshotRecordingQuery(final long correlationId, final int requestMemberId)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember requester = clusterMemberByIdMap.get(requestMemberId);
if (null != requester)
{
consensusPublisher.snapshotRecording(
requester.publication(),
correlationId,
recoveryPlan,
ClusterMember.encodeAsString(activeMembers));
}
}
}
void onSnapshotRecordings(final long correlationId, final SnapshotRecordingsDecoder decoder)
{
if (null != dynamicJoin)
{
dynamicJoin.onSnapshotRecordings(correlationId, decoder);
}
}
void onJoinCluster(final long leadershipTermId, final int memberId)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
final long snapshotLeadershipTermId = recoveryPlan.snapshots.isEmpty() ?
NULL_VALUE : recoveryPlan.snapshots.get(0).leadershipTermId;
if (null != member && !member.hasRequestedJoin() && leadershipTermId <= snapshotLeadershipTermId)
{
if (null == member.publication())
{
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
final int streamId = ctx.consensusStreamId();
ClusterMember.addConsensusPublication(member, consensusUri, streamId, aeron);
logPublisher.addDestination(ctx.isLogMdc(), member.logEndpoint());
}
member.hasRequestedJoin(true);
}
}
}
void onTerminationPosition(final long leadershipTermId, final long logPosition)
{
if (leadershipTermId == this.leadershipTermId && Cluster.Role.FOLLOWER == role)
{
terminationPosition = logPosition;
timeOfLastLogUpdateNs = clusterClock.timeNanos();
}
}
void onTerminationAck(final long leadershipTermId, final long logPosition, final int memberId)
{
if (leadershipTermId == this.leadershipTermId &&
logPosition >= terminationPosition &&
Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
if (null != member)
{
member.hasTerminated(true);
if (clusterTermination.canTerminate(activeMembers, terminationPosition, clusterClock.timeNanos()))
{
recordingLog.commitLogPosition(leadershipTermId, terminationPosition);
closeAndTerminate();
}
}
}
}
void onBackupQuery(
final long correlationId,
final int responseStreamId,
final int version,
final String responseChannel,
final byte[] encodedCredentials)
{
if (null == election && null == dynamicJoin)
{
if (Cluster.Role.LEADER != role)
{
consensusPublisher.backupQuery(
leaderMember.publication(),
correlationId,
responseStreamId,
version,
responseChannel,
encodedCredentials);
}
else if (state == ConsensusModule.State.ACTIVE || state == ConsensusModule.State.SUSPENDED)
{
final ClusterSession session = new ClusterSession(NULL_VALUE, responseStreamId, responseChannel);
session.markAsBackupSession();
session.connect(aeron);
final long now = clusterClock.time();
session.lastActivityNs(clusterTimeUnit.toNanos(now), correlationId);
if (AeronCluster.Configuration.PROTOCOL_MAJOR_VERSION != SemanticVersion.major(version))
{
final String detail = SESSION_INVALID_VERSION_MSG + " " + SemanticVersion.toString(version) +
", cluster=" + SemanticVersion.toString(AeronCluster.Configuration.PROTOCOL_SEMANTIC_VERSION);
session.reject(EventCode.ERROR, detail);
rejectedSessions.add(session);
}
else if (pendingSessions.size() + sessionByIdMap.size() >= ctx.maxConcurrentSessions())
{
session.reject(EventCode.ERROR, SESSION_LIMIT_MSG);
rejectedSessions.add(session);
}
else
{
authenticator.onConnectRequest(session.id(), encodedCredentials, clusterTimeUnit.toMillis(now));
pendingSessions.add(session);
}
}
}
}
void onRemoveMember(final int memberId, final boolean isPassive)
{
if (null == election && Cluster.Role.LEADER == role)
{
final ClusterMember member = clusterMemberByIdMap.get(memberId);
if (null != member)
{
if (isPassive)
{
passiveMembers = ClusterMember.removeMember(passiveMembers, memberId);
member.closePublication(ctx.countedErrorHandler());
logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint());
clusterMemberByIdMap.remove(memberId);
clusterMemberByIdMap.compact();
}
else
{
final long now = clusterClock.time();
final long position = logPublisher.appendMembershipChangeEvent(
leadershipTermId,
now,
this.memberId,
activeMembers.length,
ChangeType.QUIT,
memberId,
ClusterMember.encodeAsString(ClusterMember.removeMember(activeMembers, memberId)));
if (position > 0)
{
timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs;
member.removalPosition(position);
pendingMemberRemovals++;
}
}
}
}
}
void onClusterMembersQuery(final long correlationId, final boolean isExtendedRequest)
{
if (isExtendedRequest)
{
serviceProxy.clusterMembersExtendedResponse(
correlationId, clusterClock.timeNanos(), leaderMember.id(), memberId, activeMembers, passiveMembers);
}
else
{
serviceProxy.clusterMembersResponse(
correlationId,
leaderMember.id(),
ClusterMember.encodeAsString(activeMembers),
ClusterMember.encodeAsString(passiveMembers));
}
}
void state(final ConsensusModule.State newState)
{
if (newState != state)
{
stateChange(state, newState, memberId);
state = newState;
if (!moduleState.isClosed())
{
moduleState.set(newState.code());
}
}
}
ConsensusModule.State state()
{
return state;
}
void stateChange(final ConsensusModule.State oldState, final ConsensusModule.State newState, final int memberId)
{
//System.out.println("CM State memberId=" + memberId + " " + oldState + " -> " + newState);
}
void role(final Cluster.Role newRole)
{
if (newRole != role)
{
roleChange(role, newRole, memberId);
role = newRole;
if (!clusterRoleCounter.isClosed())
{
clusterRoleCounter.set(newRole.code());
}
}
}
void roleChange(final Cluster.Role oldRole, final Cluster.Role newRole, final int memberId)
{
//System.out.println("CM Role memberId=" + memberId + " " + oldRole + " -> " + newRole);
}
Cluster.Role role()
{
return role;
}
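    /**
     * Prepares for a new leadership term: reverts to follower, disconnects log endpoints, stops the log
     * recording, and rolls back uncommitted sessions, timers, and service messages to the given log position.
     * Returns the last appended position of the log recording.
     */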
long prepareForNewLeadership(final long logPosition)
{
role(Cluster.Role.FOLLOWER);
CloseHelper.close(ctx.countedErrorHandler(), ingressAdapter);
ClusterControl.ToggleState.deactivate(controlToggle);
if (null != catchupLogDestination)
{
logAdapter.removeDestination(catchupLogDestination);
catchupLogDestination = null;
}
if (null != liveLogDestination)
{
logAdapter.removeDestination(liveLogDestination);
liveLogDestination = null;
}
logAdapter.disconnect(ctx.countedErrorHandler());
logPublisher.disconnect(ctx.countedErrorHandler());
if (RecordingPos.NULL_RECORDING_ID != logRecordingId)
{
tryStopLogRecording();
lastAppendPosition = getLastAppendedPosition();
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
clearSessionsAfter(logPosition);
for (final ClusterSession session : sessionByIdMap.values())
{
session.disconnect(ctx.countedErrorHandler());
}
commitPosition.setOrdered(logPosition);
restoreUncommittedEntries(logPosition);
}
return lastAppendPosition;
}
void onServiceCloseSession(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.closing(CloseReason.SERVICE_ACTION);
if (Cluster.Role.LEADER == role &&
logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = CloseReason.SERVICE_ACTION.name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
sessionByIdMap.remove(clusterSessionId);
session.close(ctx.countedErrorHandler());
}
}
}
void onServiceMessage(final long leadershipTermId, final DirectBuffer buffer, final int offset, final int length)
{
if (leadershipTermId == this.leadershipTermId)
{
enqueueServiceSessionMessage((MutableDirectBuffer)buffer, offset, length, nextServiceSessionId++);
}
}
void onScheduleTimer(final long correlationId, final long deadline)
{
if (expiredTimerCountByCorrelationIdMap.get(correlationId) == 0)
{
timerService.scheduleTimerForCorrelationId(correlationId, deadline);
}
else
{
expiredTimerCountByCorrelationIdMap.decrementAndGet(correlationId);
}
}
void onCancelTimer(final long correlationId)
{
timerService.cancelTimerByCorrelationId(correlationId);
}
void onServiceAck(
final long logPosition, final long timestamp, final long ackId, final long relevantId, final int serviceId)
{
captureServiceAck(logPosition, ackId, relevantId, serviceId);
if (ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues))
{
if (ConsensusModule.State.SNAPSHOT == state)
{
final ServiceAck[] serviceAcks = pollServiceAcks(logPosition, serviceId);
++serviceAckId;
takeSnapshot(timestamp, logPosition, serviceAcks);
if (null != clusterTermination)
{
serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler());
clusterTermination.deadlineNs(clusterClock.timeNanos() + ctx.terminationTimeoutNs());
state(ConsensusModule.State.TERMINATING);
}
else
{
state(ConsensusModule.State.ACTIVE);
if (Cluster.Role.LEADER == role)
{
ClusterControl.ToggleState.reset(controlToggle);
}
}
}
else if (ConsensusModule.State.QUITTING == state)
{
closeAndTerminate();
}
else if (ConsensusModule.State.TERMINATING == state)
{
if (null == clusterTermination)
{
consensusPublisher.terminationAck(
leaderMember.publication(), leadershipTermId, logPosition, memberId);
recordingLog.commitLogPosition(leadershipTermId, logPosition);
closeAndTerminate();
}
else
{
clusterTermination.onServicesTerminated();
if (clusterTermination.canTerminate(
activeMembers, terminationPosition, clusterClock.timeNanos()))
{
recordingLog.commitLogPosition(leadershipTermId, logPosition);
closeAndTerminate();
}
}
}
}
}
void onReplaySessionMessage(final long clusterSessionId, final long timestamp)
{
final ClusterSession clusterSession = sessionByIdMap.get(clusterSessionId);
if (null == clusterSession)
{
logServiceSessionId = clusterSessionId;
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
}
else
{
clusterSession.timeOfLastActivityNs(clusterTimeUnit.toNanos(timestamp));
}
}
void onReplayTimerEvent(final long correlationId)
{
if (!timerService.cancelTimerByCorrelationId(correlationId))
{
expiredTimerCountByCorrelationIdMap.getAndIncrement(correlationId);
}
}
void onReplaySessionOpen(
final long logPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(logPosition);
session.lastActivityNs(clusterTimeUnit.toNanos(timestamp), correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onReplaySessionClose(final long clusterSessionId, final CloseReason closeReason)
{
final ClusterSession clusterSession = sessionByIdMap.remove(clusterSessionId);
if (null != clusterSession)
{
clusterSession.closing(closeReason);
clusterSession.close(ctx.countedErrorHandler());
}
}
void onReplayClusterAction(final long leadershipTermId, final ClusterAction action)
{
if (leadershipTermId == this.replayLeadershipTermId)
{
if (ClusterAction.SUSPEND == action)
{
state(ConsensusModule.State.SUSPENDED);
}
else if (ClusterAction.RESUME == action)
{
state(ConsensusModule.State.ACTIVE);
}
else if (ClusterAction.SNAPSHOT == action)
{
state(ConsensusModule.State.SNAPSHOT);
}
}
}
void onReplayNewLeadershipTermEvent(
final long leadershipTermId,
final long logPosition,
final long timestamp,
final long termBaseLogPosition,
final TimeUnit timeUnit,
final int appVersion)
{
if (timeUnit != clusterTimeUnit)
{
ctx.countedErrorHandler().onError(new ClusterException(
"incompatible timestamp units: " + clusterTimeUnit + " log=" + timeUnit,
AeronException.Category.FATAL));
unexpectedTermination();
}
if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion))
{
ctx.countedErrorHandler().onError(new ClusterException(
"incompatible version: " + SemanticVersion.toString(ctx.appVersion()) +
" log=" + SemanticVersion.toString(appVersion),
AeronException.Category.FATAL));
unexpectedTermination();
}
leadershipTermId(leadershipTermId);
if (null != election)
{
election.onReplayNewLeadershipTermEvent(
logRecordingId, leadershipTermId, logPosition, timestamp, termBaseLogPosition);
}
}
void onReplayMembershipChange(
final long leadershipTermId,
final long logPosition,
final int leaderMemberId,
final ChangeType changeType,
final int memberId,
final String clusterMembers)
{
if (leadershipTermId == this.replayLeadershipTermId)
{
if (ChangeType.JOIN == changeType)
{
final ClusterMember[] newMembers = ClusterMember.parse(clusterMembers);
if (memberId == this.memberId)
{
activeMembers = newMembers;
clusterMemberByIdMap.clear();
clusterMemberByIdMap.compact();
ClusterMember.addClusterMemberIds(newMembers, clusterMemberByIdMap);
thisMember = ClusterMember.findMember(activeMembers, memberId);
leaderMember = ClusterMember.findMember(activeMembers, leaderMemberId);
ClusterMember.addConsensusPublications(
newMembers,
thisMember,
ChannelUri.parse(ctx.consensusChannel()),
ctx.consensusStreamId(),
aeron);
}
else
{
clusterMemberJoined(memberId, newMembers);
}
}
else if (ChangeType.QUIT == changeType)
{
if (memberId == this.memberId)
{
state(ConsensusModule.State.QUITTING);
}
else
{
clusterMemberQuit(memberId);
if (leaderMemberId == memberId && null == election)
{
commitPosition.proposeMaxOrdered(logPosition);
enterElection();
}
}
}
if (null != election)
{
election.onMembershipChange(activeMembers, changeType, memberId, logPosition);
}
}
}
void onLoadSession(
final long clusterSessionId,
final long correlationId,
final long openedPosition,
final long timeOfLastActivity,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
sessionByIdMap.put(clusterSessionId, new ClusterSession(
clusterSessionId,
correlationId,
openedPosition,
timeOfLastActivity,
responseStreamId,
responseChannel,
closeReason));
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadPendingMessage(final DirectBuffer buffer, final int offset, final int length)
{
pendingServiceMessages.append(buffer, offset, length);
}
void onLoadConsensusModuleState(
final long nextSessionId,
final long nextServiceSessionId,
final long logServiceSessionId,
final int pendingMessageCapacity)
{
this.nextSessionId = nextSessionId;
this.nextServiceSessionId = nextServiceSessionId;
this.logServiceSessionId = logServiceSessionId;
pendingServiceMessages.reset(pendingMessageCapacity);
}
void onLoadClusterMembers(final int memberId, final int highMemberId, final String members)
{
if (null == dynamicJoin && !ctx.clusterMembersIgnoreSnapshot())
{
if (NULL_VALUE == this.memberId)
{
this.memberId = memberId;
ctx.clusterMarkFile().memberId(memberId);
}
if (ClusterMember.EMPTY_MEMBERS == activeMembers)
{
activeMembers = ClusterMember.parse(members);
this.highMemberId = Math.max(ClusterMember.highMemberId(activeMembers), highMemberId);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
thisMember = clusterMemberByIdMap.get(memberId);
final ChannelUri consensusUri = ChannelUri.parse(ctx.consensusChannel());
consensusUri.put(ENDPOINT_PARAM_NAME, thisMember.consensusEndpoint());
ClusterMember.addConsensusPublications(
activeMembers, thisMember, consensusUri, ctx.consensusStreamId(), aeron);
}
}
}
int addLogPublication()
{
final long logPublicationTag = aeron.nextCorrelationId();
logPublicationChannelTag = aeron.nextCorrelationId();
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
channelUri.put(ALIAS_PARAM_NAME, "log");
channelUri.put(TAGS_PARAM_NAME, logPublicationChannelTag + "," + logPublicationTag);
if (channelUri.isUdp())
{
if (!channelUri.containsKey(FLOW_CONTROL_PARAM_NAME))
{
final long timeout = TimeUnit.NANOSECONDS.toSeconds(ctx.leaderHeartbeatTimeoutNs());
channelUri.put(FLOW_CONTROL_PARAM_NAME, "min,t:" + timeout + "s");
}
if (ctx.isLogMdc())
{
channelUri.put(MDC_CONTROL_MODE_PARAM_NAME, MDC_CONTROL_MODE_MANUAL);
}
channelUri.put(SPIES_SIMULATE_CONNECTION_PARAM_NAME, Boolean.toString(activeMembers.length == 1));
}
if (null != recoveryPlan.log)
{
channelUri.initialPosition(
recoveryPlan.appendedLogPosition, recoveryPlan.log.initialTermId, recoveryPlan.log.termBufferLength);
channelUri.put(MTU_LENGTH_PARAM_NAME, Integer.toString(recoveryPlan.log.mtuLength));
}
final String channel = channelUri.toString();
final ExclusivePublication publication = aeron.addExclusivePublication(channel, ctx.logStreamId());
if (ctx.isLogMdc())
{
for (final ClusterMember member : activeMembers)
{
if (member.id() != memberId)
{
publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint());
}
}
for (final ClusterMember member : passiveMembers)
{
publication.asyncAddDestination("aeron:udp?endpoint=" + member.logEndpoint());
}
}
logPublisher.publication(publication);
return publication.sessionId();
}
void joinLogAsLeader(
final long leadershipTermId, final long logPosition, final int logSessionId, final boolean isStartup)
{
final boolean isIpc = ctx.logChannel().startsWith(IPC_CHANNEL);
final String channel = (isIpc ? "aeron:ipc" : "aeron:udp") +
"?tags=" + logPublicationChannelTag + "|session-id=" + logSessionId + "|alias=log";
leadershipTermId(leadershipTermId);
startLogRecording(channel, ctx.logStreamId(), SourceLocation.LOCAL);
createAppendPosition(logSessionId);
awaitServicesReady(
isIpc ? channel : SPY_PREFIX + channel,
ctx.logStreamId(),
logSessionId,
logPosition,
Long.MAX_VALUE,
isStartup,
Cluster.Role.LEADER);
}
void liveLogDestination(final String liveLogDestination)
{
this.liveLogDestination = liveLogDestination;
}
String liveLogDestination()
{
return liveLogDestination;
}
void catchupLogDestination(final String catchupLogDestination)
{
this.catchupLogDestination = catchupLogDestination;
}
String catchupLogDestination()
{
return catchupLogDestination;
}
void joinLogAsFollower(final Image image, final boolean isLeaderStartup)
{
final Subscription logSubscription = image.subscription();
final int streamId = logSubscription.streamId();
final String channel = logSubscription.channel();
startLogRecording(channel, streamId, SourceLocation.REMOTE);
createAppendPosition(image.sessionId());
appendDynamicJoinTermAndSnapshots();
logAdapter.image(image);
lastAppendPosition = image.joinPosition();
awaitServicesReady(
channel,
streamId,
image.sessionId(),
image.joinPosition(),
Long.MAX_VALUE,
isLeaderStartup,
Cluster.Role.FOLLOWER);
}
void awaitServicesReady(
final String logChannel,
final int streamId,
final int logSessionId,
final long logPosition,
final long maxLogPosition,
final boolean isStartup,
final Cluster.Role role)
{
serviceProxy.joinLog(
logPosition,
maxLogPosition,
memberId,
logSessionId,
streamId,
isStartup,
role,
logChannel);
expectedAckPosition = logPosition;
while (!ServiceAck.hasReached(logPosition, serviceAckId, serviceAckQueues))
{
idle(consensusModuleAdapter.poll());
}
ServiceAck.removeHead(serviceAckQueues);
++serviceAckId;
}
void leadershipTermId(final long leadershipTermId)
{
this.leadershipTermId = leadershipTermId;
this.replayLeadershipTermId = leadershipTermId;
}
LogReplay newLogReplay(final long logPosition, final long appendPosition)
{
return new LogReplay(
archive,
logRecordingId,
logPosition,
appendPosition,
logAdapter,
ctx);
}
int replayLogPoll(final LogAdapter logAdapter, final long stopPosition)
{
int workCount = 0;
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
final int fragments = logAdapter.poll(stopPosition);
final long position = logAdapter.position();
if (fragments > 0)
{
commitPosition.setOrdered(position);
}
else if (logAdapter.isImageClosed() && position < stopPosition)
{
throw new ClusterException("unexpected image close when replaying log: position=" + position);
}
workCount += fragments;
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
long logRecordingId()
{
return logRecordingId;
}
void logRecordingId(final long recordingId)
{
if (NULL_VALUE != recordingId)
{
logRecordingId = recordingId;
}
}
void truncateLogEntry(final long leadershipTermId, final long logPosition)
{
archive.stopAllReplays(logRecordingId);
archive.truncateRecording(logRecordingId, logPosition);
recordingLog.commitLogPosition(leadershipTermId, logPosition);
logAdapter.disconnect(ctx.countedErrorHandler(), logPosition);
}
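    /**
     * Finalises an election: the new leader appends a new-leadership-term event and reactivates operator
     * control, followers reset their heartbeat deadlines, then the commit position and member details are
     * updated and ingress is reconnected. Returns false if the leader could not yet append the term event.
     */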
boolean electionComplete()
{
final long logPosition = election.logPosition();
final long now = clusterClock.time();
final long nowNs = clusterTimeUnit.toNanos(now);
if (Cluster.Role.LEADER == role)
{
if (!logPublisher.isConnected() || !logPublisher.appendNewLeadershipTermEvent(
leadershipTermId,
now,
logPosition,
memberId,
logPublisher.sessionId(),
clusterTimeUnit,
ctx.appVersion()))
{
return false;
}
timeOfLastLogUpdateNs = nowNs - leaderHeartbeatIntervalNs;
timerService.currentTickTime(now);
ClusterControl.ToggleState.activate(controlToggle);
prepareSessionsForNewTerm(election.isLeaderStartup());
}
else
{
timeOfLastLogUpdateNs = nowNs;
timeOfLastAppendPositionNs = nowNs;
}
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), logRecordingId);
notifiedCommitPosition = logPosition;
commitPosition.setOrdered(logPosition);
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
updateMemberDetails(election.leader());
election = null;
connectIngress();
return true;
}
boolean dynamicJoinComplete()
{
if (0 == activeMembers.length)
{
activeMembers = dynamicJoin.clusterMembers();
ClusterMember.addClusterMemberIds(activeMembers, clusterMemberByIdMap);
leaderMember = dynamicJoin.leader();
ClusterMember.addConsensusPublications(
activeMembers, thisMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
}
if (NULL_VALUE == memberId)
{
memberId = dynamicJoin.memberId();
ctx.clusterMarkFile().memberId(memberId);
thisMember.id(memberId);
}
dynamicJoin = null;
election = new Election(
false,
leadershipTermId,
commitPosition.getWeak(),
recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
return true;
}
void trackCatchupCompletion(final ClusterMember follower, final long leadershipTermId)
{
if (NULL_VALUE != follower.catchupReplaySessionId())
{
if (follower.logPosition() >= logPublisher.position())
{
if (NULL_VALUE != follower.catchupReplayCorrelationId())
{
if (archive.archiveProxy().stopReplay(
follower.catchupReplaySessionId(), aeron.nextCorrelationId(), archive.controlSessionId()))
{
follower.catchupReplayCorrelationId(NULL_VALUE);
}
}
if (consensusPublisher.stopCatchup(follower.publication(), leadershipTermId, follower.id()))
{
follower.catchupReplaySessionId(NULL_VALUE);
}
}
}
}
void catchupInitiated(final long nowNs)
{
timeOfLastAppendPositionNs = nowNs;
}
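    /**
     * Polls the log during catchup up to the leader's limit, advancing the commit position and periodically
     * reporting the append position back to the leader; throws if no progress is made within the leader
     * heartbeat timeout.
     */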
int catchupPoll(final long limitPosition, final long nowNs)
{
int workCount = 0;
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
final int fragments = logAdapter.poll(Math.min(appendPosition.get(), limitPosition));
workCount += fragments;
if (fragments == 0 && logAdapter.image().isClosed())
{
throw new ClusterException("unexpected close replaying log: position=" + logAdapter.image().position());
}
}
final long appendPosition = logAdapter.position();
if (appendPosition > lastAppendPosition || nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs))
{
commitPosition.proposeMaxOrdered(appendPosition);
final ExclusivePublication publication = election.leader().publication();
if (consensusPublisher.appendPosition(publication, replayLeadershipTermId, appendPosition, memberId))
{
lastAppendPosition = appendPosition;
timeOfLastAppendPositionNs = nowNs;
}
}
if (nowNs > (timeOfLastAppendPositionNs + leaderHeartbeatTimeoutNs) && ConsensusModule.State.ACTIVE == state)
{
throw new ClusterException("no catchup progress", WARN);
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
boolean isCatchupNearLive(final long position)
{
final Image image = logAdapter.image();
if (null != image)
{
final long localPosition = image.position();
final long window = Math.min(image.termBufferLength() >> 2, LIVE_ADD_MAX_WINDOW);
return localPosition >= (position - window);
}
return false;
}
void stopAllCatchups()
{
for (final ClusterMember member : activeMembers)
{
if (member.catchupReplaySessionId() != NULL_VALUE)
{
if (member.catchupReplayCorrelationId() != NULL_VALUE)
{
try
{
archive.stopReplay(member.catchupReplaySessionId());
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException("catchup already stopped", ex, WARN));
}
}
member.catchupReplaySessionId(NULL_VALUE);
member.catchupReplayCorrelationId(NULL_VALUE);
}
}
}
void retrievedSnapshot(final long localRecordingId, final RecordingLog.Snapshot leaderSnapshot)
{
dynamicJoinSnapshots.add(new RecordingLog.Snapshot(
localRecordingId,
leaderSnapshot.leadershipTermId,
leaderSnapshot.termBaseLogPosition,
leaderSnapshot.logPosition,
leaderSnapshot.timestamp,
leaderSnapshot.serviceId));
}
Counter loadSnapshotsForDynamicJoin()
{
recoveryPlan = RecordingLog.createRecoveryPlan(dynamicJoinSnapshots);
if (null != recoveryPlan.log)
{
logRecordingId = recoveryPlan.log.recordingId;
}
final Counter recoveryStateCounter = addRecoveryStateCounter(recoveryPlan);
if (!recoveryPlan.snapshots.isEmpty())
{
loadSnapshot(recoveryPlan.snapshots.get(0), archive);
}
return recoveryStateCounter;
}
boolean pollForSnapshotLoadAck(final Counter recoveryStateCounter, final long nowNs)
{
consensusModuleAdapter.poll();
if (ServiceAck.hasReached(expectedAckPosition, serviceAckId, serviceAckQueues))
{
captureServiceClientIds();
++serviceAckId;
CloseHelper.close(ctx.countedErrorHandler(), recoveryStateCounter);
state(ConsensusModule.State.ACTIVE);
timeOfLastLogUpdateNs = nowNs;
leadershipTermId(recoveryPlan.lastLeadershipTermId);
return true;
}
return false;
}
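    /**
     * Polls the archive for control responses and recording signals, handling catchup replay failures,
     * storage-space errors, log recording stop signals, and a disconnected local archive.
     */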
int pollArchiveEvents()
{
int workCount = 0;
if (null != archive)
{
final RecordingSignalPoller poller = this.recordingSignalPoller;
workCount += poller.poll();
if (poller.isPollComplete())
{
final int templateId = poller.templateId();
if (ControlResponseDecoder.TEMPLATE_ID == templateId && poller.code() == ControlResponseCode.ERROR)
{
for (final ClusterMember member : activeMembers)
{
if (member.catchupReplayCorrelationId() != NULL_VALUE &&
member.catchupReplayCorrelationId() == poller.correlationId())
{
member.catchupReplaySessionId(NULL_VALUE);
member.catchupReplayCorrelationId(NULL_VALUE);
ctx.countedErrorHandler().onError(new ClusterException(
"catchup replay failed - " + poller.errorMessage(), WARN));
return workCount;
}
}
final ArchiveException ex = new ArchiveException(
poller.errorMessage(), (int)poller.relevantId(), poller.correlationId());
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
if (null != election)
{
election.handleError(clusterClock.timeNanos(), ex);
}
}
else if (RecordingSignalEventDecoder.TEMPLATE_ID == templateId)
{
final long recordingId = poller.recordingId();
final long position = poller.recordingPosition();
final RecordingSignal signal = poller.recordingSignal();
if (RecordingSignal.STOP == signal && recordingId == logRecordingId)
{
this.logRecordedPosition = position;
}
if (null != election)
{
election.onRecordingSignal(poller.correlationId(), recordingId, position, signal);
}
if (null != dynamicJoin)
{
dynamicJoin.onRecordingSignal(poller.correlationId(), recordingId, position, signal);
}
}
}
else if (0 == workCount && !poller.subscription().isConnected())
{
ctx.countedErrorHandler().onError(new ClusterException("local archive is not connected", WARN));
unexpectedTermination();
}
}
return workCount;
}
private void startLogRecording(final String channel, final int streamId, final SourceLocation sourceLocation)
{
try
{
final long logRecordingId = recordingLog.findLastTermRecordingId();
if (RecordingPos.NULL_RECORDING_ID == logRecordingId)
{
logSubscriptionId = archive.startRecording(channel, streamId, sourceLocation, true);
}
else
{
logSubscriptionId = archive.extendRecording(logRecordingId, channel, streamId, sourceLocation, true);
}
}
catch (final ArchiveException ex)
{
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
throw ex;
}
}
private void prepareSessionsForNewTerm(final boolean isStartup)
{
if (isStartup)
{
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.closing(CloseReason.TIMEOUT);
}
}
}
else
{
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.connect(aeron);
}
}
final long nowNs = clusterClock.timeNanos();
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
session.timeOfLastActivityNs(nowNs);
session.hasNewLeaderEventPending(true);
}
}
}
}
private void updateMemberDetails(final ClusterMember newLeader)
{
leaderMember = newLeader;
for (final ClusterMember clusterMember : activeMembers)
{
clusterMember.isLeader(clusterMember.id() == leaderMember.id());
}
ingressEndpoints = ClusterMember.ingressEndpoints(activeMembers);
}
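    /**
     * Periodic housekeeping run on each slow-tick interval: mark file activity updates, archive event polling,
     * session redirects and rejections, control toggle checks, and quorum or leader heartbeat timeouts.
     */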
private int slowTickWork(final long nowMs, final long nowNs)
{
int workCount = aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
else if (ConsensusModule.State.CLOSED == state)
{
unexpectedTermination();
}
if (nowNs >= markFileUpdateDeadlineNs)
{
markFileUpdateDeadlineNs = nowNs + MARK_FILE_UPDATE_INTERVAL_NS;
markFile.updateActivityTimestamp(nowMs);
}
workCount += pollArchiveEvents();
workCount += sendRedirects(redirectSessions, nowNs);
workCount += sendRejections(rejectedSessions, nowNs);
if (null == election)
{
if (Cluster.Role.LEADER == role)
{
workCount += checkControlToggle(nowNs);
if (ConsensusModule.State.ACTIVE == state)
{
workCount += processPendingSessions(pendingSessions, nowMs, nowNs);
workCount += checkSessions(sessionByIdMap, nowNs);
workCount += processPassiveMembers(passiveMembers);
if (!ClusterMember.hasActiveQuorum(activeMembers, nowNs, leaderHeartbeatTimeoutNs))
{
ctx.countedErrorHandler().onError(new ClusterException("inactive follower quorum", WARN));
enterElection();
workCount += 1;
}
}
else if (ConsensusModule.State.TERMINATING == state)
{
if (clusterTermination.canTerminate(activeMembers, terminationPosition, nowNs))
{
recordingLog.commitLogPosition(leadershipTermId, terminationPosition);
closeAndTerminate();
}
}
}
else if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
if (nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatTimeoutNs) && NULL_POSITION == terminationPosition)
{
ctx.countedErrorHandler().onError(new ClusterException("leader heartbeat timeout", WARN));
enterElection();
workCount += 1;
}
}
}
return workCount;
}
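    /**
     * Main consensus duty cycle: the leader polls timers, pending service messages, and ingress before
     * updating its position; followers poll the log up to the committed/appended limit and report their
     * position back to the leader.
     */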
private int consensusWork(final long timestamp, final long nowNs)
{
int workCount = 0;
if (Cluster.Role.LEADER == role)
{
if (ConsensusModule.State.ACTIVE == state)
{
workCount += timerService.poll(timestamp);
workCount += pendingServiceMessages.forEach(
pendingServiceMessageHeadOffset, serviceSessionMessageAppender, SERVICE_MESSAGE_LIMIT);
workCount += ingressAdapter.poll();
}
workCount += updateLeaderPosition(nowNs);
}
else
{
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
if (NULL_POSITION != terminationPosition && logAdapter.position() >= terminationPosition)
{
serviceProxy.terminationPosition(terminationPosition, ctx.countedErrorHandler());
state(ConsensusModule.State.TERMINATING);
}
else
{
final long limit = null != appendPosition ? appendPosition.get() : logRecordedPosition;
final int count = logAdapter.poll(min(notifiedCommitPosition, limit));
if (0 == count && logAdapter.isImageClosed())
{
ctx.countedErrorHandler().onError(new ClusterException("log disconnected from leader", WARN));
enterElection();
return 1;
}
commitPosition.proposeMaxOrdered(logAdapter.position());
workCount += ingressAdapter.poll();
workCount += count;
}
}
workCount += updateFollowerPosition(nowNs);
}
workCount += consensusModuleAdapter.poll();
return workCount;
}
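    // Apply the cluster control toggle: SUSPEND and RESUME append a cluster action
    // and flip state, SNAPSHOT begins a snapshot, SHUTDOWN snapshots and schedules
    // termination at the current log position, and ABORT schedules termination
    // without taking a snapshot.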
private int checkControlToggle(final long nowNs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND))
{
state(ConsensusModule.State.SUSPENDED);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT))
{
state(ConsensusModule.State.SNAPSHOT);
}
break;
case SHUTDOWN:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT))
{
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
final long position = logPublisher.position();
clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs());
clusterTermination.terminationPosition(
errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position);
terminationPosition = position;
state(ConsensusModule.State.SNAPSHOT);
}
break;
case ABORT:
if (ConsensusModule.State.ACTIVE == state)
{
final CountedErrorHandler errorHandler = ctx.countedErrorHandler();
final long position = logPublisher.position();
clusterTermination = new ClusterTermination(nowNs + ctx.terminationTimeoutNs());
clusterTermination.terminationPosition(
errorHandler, consensusPublisher, activeMembers, thisMember, leadershipTermId, position);
terminationPosition = position;
serviceProxy.terminationPosition(terminationPosition, errorHandler);
state(ConsensusModule.State.TERMINATING);
}
break;
default:
return 0;
}
return 1;
}
private boolean appendAction(final ClusterAction action)
{
return logPublisher.appendClusterAction(leadershipTermId, clusterClock.time(), action);
}
private int processPendingSessions(
final ArrayList<ClusterSession> pendingSessions, final long nowMs, final long nowNs)
{
int workCount = 0;
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
ctx.timedOutClientCounter().incrementOrdered();
continue;
}
if (session.state() == INIT || session.state() == CONNECTED)
{
if (session.isResponsePublicationConnected())
{
session.state(CONNECTED);
authenticator.onConnectedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == CHALLENGED)
{
if (session.isResponsePublicationConnected())
{
authenticator.onChallengedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == AUTHENTICATED)
{
if (session.isBackupSession())
{
final RecordingLog.Entry entry = recordingLog.findLastTerm();
if (null != entry && consensusPublisher.backupResponse(
session,
commitPosition.id(),
leaderMember.id(),
entry,
recoveryPlan,
ClusterMember.encodeAsString(activeMembers)))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount += 1;
}
}
else if (appendSessionAndOpen(session, nowNs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
sessionByIdMap.put(session.id(), session);
workCount += 1;
}
}
else if (session.state() == REJECTED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
rejectedSessions.add(session);
}
}
return workCount;
}
private int sendRejections(final ArrayList<ClusterSession> rejectedSessions, final long nowNs)
{
int workCount = 0;
for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = rejectedSessions.get(i);
final String detail = session.responseDetail();
final EventCode eventCode = session.eventCode();
if (egressPublisher.sendEvent(session, leadershipTermId, leaderMember.id(), eventCode, detail) ||
nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount++;
}
}
return workCount;
}
private int sendRedirects(final ArrayList<ClusterSession> redirectSessions, final long nowNs)
{
int workCount = 0;
for (int lastIndex = redirectSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = redirectSessions.get(i);
final EventCode eventCode = EventCode.REDIRECT;
final int leaderId = leaderMember.id();
if (egressPublisher.sendEvent(session, leadershipTermId, leaderId, eventCode, ingressEndpoints) ||
nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
ArrayListUtil.fastUnorderedRemove(redirectSessions, i, lastIndex--);
session.close(ctx.countedErrorHandler());
workCount++;
}
}
return workCount;
}
private int processPassiveMembers(final ClusterMember[] passiveMembers)
{
int workCount = 0;
for (final ClusterMember member : passiveMembers)
{
if (member.correlationId() != NULL_VALUE)
{
if (consensusPublisher.clusterMemberChange(
member.publication(),
member.correlationId(),
leaderMember.id(),
ClusterMember.encodeAsString(activeMembers),
ClusterMember.encodeAsString(passiveMembers)))
{
member.correlationId(NULL_VALUE);
workCount++;
}
}
else if (member.hasRequestedJoin() && member.logPosition() == logPublisher.position())
{
final ClusterMember[] newMembers = ClusterMember.addMember(activeMembers, member);
final long now = clusterClock.time();
if (logPublisher.appendMembershipChangeEvent(
leadershipTermId,
now,
leaderMember.id(),
newMembers.length,
ChangeType.JOIN,
member.id(),
ClusterMember.encodeAsString(newMembers)) > 0)
{
timeOfLastLogUpdateNs = clusterTimeUnit.toNanos(now) - leaderHeartbeatIntervalNs;
this.passiveMembers = ClusterMember.removeMember(this.passiveMembers, member.id());
activeMembers = newMembers;
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
member.hasRequestedJoin(false);
workCount++;
break;
}
}
}
return workCount;
}
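    // Close sessions which have exceeded the session timeout by appending a session
    // close to the log and notifying the client, and send any pending open or
    // new-leader events for sessions that remain open.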
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowNs)
{
int workCount = 0;
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
if (nowNs > (session.timeOfLastActivityNs() + sessionTimeoutNs))
{
if (session.state() == OPEN)
{
session.closing(CloseReason.TIMEOUT);
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = session.closeReason().name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
i.remove();
session.close(ctx.countedErrorHandler());
ctx.timedOutClientCounter().incrementOrdered();
workCount++;
}
}
else if (session.state() == CLOSING)
{
if (logPublisher.appendSessionClose(session, leadershipTermId, clusterClock.time()))
{
final String msg = session.closeReason().name();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, msg);
session.closedLogPosition(logPublisher.position());
uncommittedClosedSessions.addLast(session);
i.remove();
session.close(ctx.countedErrorHandler());
if (session.closeReason() == CloseReason.TIMEOUT)
{
ctx.timedOutClientCounter().incrementOrdered();
}
workCount++;
}
}
else
{
i.remove();
session.close(ctx.countedErrorHandler());
workCount++;
}
}
else if (session.hasOpenEventPending())
{
workCount += sendSessionOpenEvent(session);
}
else if (session.hasNewLeaderEventPending())
{
workCount += sendNewLeaderEvent(session);
}
}
return workCount;
}
private void captureServiceAck(final long logPosition, final long ackId, final long relevantId, final int serviceId)
{
if (0 == ackId && NULL_VALUE != serviceClientIds[serviceId])
{
throw new ClusterException(
"initial ack already received from service: possible duplicate serviceId=" + serviceId);
}
serviceAckQueues[serviceId].offerLast(new ServiceAck(ackId, logPosition, relevantId));
}
private ServiceAck[] pollServiceAcks(final long logPosition, final int serviceId)
{
final ServiceAck[] serviceAcks = new ServiceAck[serviceAckQueues.length];
for (int id = 0, length = serviceAckQueues.length; id < length; id++)
{
final ServiceAck serviceAck = serviceAckQueues[id].pollFirst();
if (null == serviceAck || serviceAck.logPosition() != logPosition)
{
throw new ClusterException(
"invalid ack for serviceId=" + serviceId + " logPosition=" + logPosition + " " + serviceAck);
}
serviceAcks[id] = serviceAck;
}
return serviceAcks;
}
private int sendNewLeaderEvent(final ClusterSession session)
{
if (egressPublisher.newLeader(session, leadershipTermId, leaderMember.id(), ingressEndpoints))
{
session.hasNewLeaderEventPending(false);
return 1;
}
return 0;
}
private int sendSessionOpenEvent(final ClusterSession session)
{
if (egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.OK, ""))
{
session.hasOpenEventPending(false);
return 1;
}
return 0;
}
private boolean appendSessionAndOpen(final ClusterSession session, final long nowNs)
{
final long resultingPosition = logPublisher.appendSessionOpen(session, leadershipTermId, clusterClock.time());
if (resultingPosition > 0)
{
session.open(resultingPosition);
session.timeOfLastActivityNs(nowNs);
return true;
}
return false;
}
private void createAppendPosition(final int logSessionId)
{
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, logSessionId);
final long registrationId = counters.getCounterRegistrationId(counterId);
appendPosition = new ReadableCounter(counters, registrationId, counterId);
logRecordedPosition = NULL_POSITION;
if (NULL_VALUE == logRecordingId)
{
logRecordingId = RecordingPos.getRecordingId(counters, counterId);
}
}
private void loadSnapshot(final RecordingLog.Snapshot snapshot, final AeronArchive archive)
{
final String channel = ctx.replayChannel();
final int streamId = ctx.replayStreamId();
final int sessionId = (int)archive.startReplay(snapshot.recordingId, 0, NULL_LENGTH, channel, streamId);
final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);
try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
{
final Image image = awaitImage(sessionId, subscription);
final ConsensusModuleSnapshotLoader snapshotLoader = new ConsensusModuleSnapshotLoader(image, this);
while (true)
{
final int fragments = snapshotLoader.poll();
if (fragments == 0)
{
if (snapshotLoader.isDone())
{
break;
}
if (image.isClosed())
{
throw new ClusterException("snapshot ended unexpectedly");
}
}
idle(fragments);
}
final int appVersion = snapshotLoader.appVersion();
if (SemanticVersion.major(ctx.appVersion()) != SemanticVersion.major(appVersion))
{
throw new ClusterException(
"incompatible version: " + SemanticVersion.toString(ctx.appVersion()) +
" snapshot=" + SemanticVersion.toString(appVersion));
}
final TimeUnit timeUnit = snapshotLoader.timeUnit();
if (timeUnit != clusterTimeUnit)
{
throw new ClusterException("incompatible time unit: " + clusterTimeUnit + " snapshot=" + timeUnit);
}
pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE);
}
timerService.currentTickTime(clusterClock.time());
leadershipTermId(snapshot.leadershipTermId);
commitPosition.setOrdered(snapshot.logPosition);
expectedAckPosition = snapshot.logPosition;
}
private Image awaitImage(final int sessionId, final Subscription subscription)
{
idleStrategy.reset();
Image image;
while ((image = subscription.imageBySessionId(sessionId)) == null)
{
idle();
}
return image;
}
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
final int snapshotsCount = plan.snapshots.size();
if (snapshotsCount > 0)
{
final long[] serviceSnapshotRecordingIds = new long[snapshotsCount - 1];
final RecordingLog.Snapshot snapshot = plan.snapshots.get(0);
for (int i = 1; i < snapshotsCount; i++)
{
final RecordingLog.Snapshot serviceSnapshot = plan.snapshots.get(i);
serviceSnapshotRecordingIds[serviceSnapshot.serviceId] = serviceSnapshot.recordingId;
}
return RecoveryState.allocate(
aeron,
snapshot.leadershipTermId,
snapshot.logPosition,
snapshot.timestamp,
ctx.clusterId(),
serviceSnapshotRecordingIds);
}
return RecoveryState.allocate(aeron, leadershipTermId, 0, 0, ctx.clusterId());
}
private DynamicJoin requiresDynamicJoin()
{
if (0 == activeMembers.length && null != ctx.clusterConsensusEndpoints())
{
return new DynamicJoin(ctx.clusterConsensusEndpoints(), archive, consensusPublisher, ctx, this);
}
return null;
}
private void captureServiceClientIds()
{
for (int i = 0, length = serviceClientIds.length; i < length; i++)
{
final ServiceAck serviceAck = serviceAckQueues[i].pollFirst();
serviceClientIds[i] = Objects.requireNonNull(serviceAck).relevantId();
}
}
private void handleMemberRemovals(final long commitPosition)
{
ClusterMember[] members = activeMembers;
for (final ClusterMember member : activeMembers)
{
if (member.hasRequestedRemove() && member.removalPosition() <= commitPosition)
{
if (member.id() == memberId)
{
state(ConsensusModule.State.QUITTING);
}
members = ClusterMember.removeMember(members, member.id());
clusterMemberByIdMap.remove(member.id());
clusterMemberByIdMap.compact();
member.closePublication(ctx.countedErrorHandler());
logPublisher.removeDestination(ctx.isLogMdc(), member.logEndpoint());
pendingMemberRemovals--;
}
}
activeMembers = members;
rankedPositions = new long[ClusterMember.quorumThreshold(members.length)];
}
private int updateLeaderPosition(final long nowNs)
{
if (null != appendPosition)
{
return updateLeaderPosition(nowNs, appendPosition.get());
}
return 0;
}
int updateLeaderPosition(final long nowNs, final long position)
{
thisMember.logPosition(position).timeOfLastAppendPositionNs(nowNs);
final long commitPosition = min(quorumPosition(activeMembers, rankedPositions), position);
if (commitPosition > this.commitPosition.getWeak() ||
nowNs >= (timeOfLastLogUpdateNs + leaderHeartbeatIntervalNs))
{
for (final ClusterMember member : activeMembers)
{
if (member.id() != memberId)
{
consensusPublisher.commitPosition(
member.publication(), leadershipTermId, commitPosition, memberId);
}
}
this.commitPosition.setOrdered(commitPosition);
timeOfLastLogUpdateNs = nowNs;
clearUncommittedEntriesTo(commitPosition);
if (pendingMemberRemovals > 0)
{
handleMemberRemovals(commitPosition);
}
return 1;
}
return 0;
}
LogReplication newLogReplication(
final String leaderArchiveEndpoint, final long leaderRecordingId, final long stopPosition, final long nowNs)
{
return new LogReplication(
archive,
leaderRecordingId,
logRecordingId,
stopPosition,
leaderArchiveEndpoint,
ctx.replicationChannel(),
ctx.leaderHeartbeatTimeoutNs(),
ctx.leaderHeartbeatIntervalNs(),
nowNs);
}
private int updateFollowerPosition(final long nowNs)
{
final long recordedPosition = null != appendPosition ? appendPosition.get() : logRecordedPosition;
final long position = Math.max(recordedPosition, lastAppendPosition);
if ((recordedPosition > lastAppendPosition ||
nowNs >= (timeOfLastAppendPositionNs + leaderHeartbeatIntervalNs)) &&
consensusPublisher.appendPosition(leaderMember.publication(), leadershipTermId, position, memberId))
{
lastAppendPosition = position;
timeOfLastAppendPositionNs = nowNs;
return 1;
}
return 0;
}
private void clearSessionsAfter(final long logPosition)
{
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
if (session.openedLogPosition() > logPosition)
{
i.remove();
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election");
session.close(ctx.countedErrorHandler());
}
}
for (final ClusterSession session : pendingSessions)
{
egressPublisher.sendEvent(session, leadershipTermId, memberId, EventCode.CLOSED, "election");
session.close(ctx.countedErrorHandler());
}
pendingSessions.clear();
}
private void clearUncommittedEntriesTo(final long commitPosition)
{
if (uncommittedServiceMessages > 0)
{
pendingServiceMessageHeadOffset -= pendingServiceMessages.consume(
leaderServiceSessionMessageSweeper, Integer.MAX_VALUE);
}
while (uncommittedTimers.peekLong() <= commitPosition)
{
uncommittedTimers.pollLong();
uncommittedTimers.pollLong();
}
while (true)
{
final ClusterSession clusterSession = uncommittedClosedSessions.peekFirst();
if (null == clusterSession || clusterSession.closedLogPosition() > commitPosition)
{
break;
}
uncommittedClosedSessions.pollFirst();
}
}
private void restoreUncommittedEntries(final long commitPosition)
{
for (final LongArrayQueue.LongIterator i = uncommittedTimers.iterator(); i.hasNext(); )
{
final long appendPosition = i.nextValue();
final long correlationId = i.nextValue();
if (appendPosition > commitPosition)
{
timerService.scheduleTimerForCorrelationId(correlationId, timerService.currentTickTime());
}
}
uncommittedTimers.clear();
pendingServiceMessages.consume(followerServiceSessionMessageSweeper, Integer.MAX_VALUE);
pendingServiceMessageHeadOffset = 0;
if (uncommittedServiceMessages > 0)
{
pendingServiceMessages.consume(leaderServiceSessionMessageSweeper, Integer.MAX_VALUE);
pendingServiceMessages.forEach(this::serviceSessionMessageReset, Integer.MAX_VALUE);
uncommittedServiceMessages = 0;
}
ClusterSession session;
while (null != (session = uncommittedClosedSessions.pollFirst()))
{
if (session.closedLogPosition() > commitPosition)
{
session.closedLogPosition(NULL_POSITION);
session.state(CLOSING);
sessionByIdMap.put(session.id(), session);
}
}
}
private void enterElection()
{
role(Cluster.Role.FOLLOWER);
election = new Election(
false,
leadershipTermId,
commitPosition.getWeak(),
null != appendPosition ? appendPosition.get() : recoveryPlan.appendedLogPosition,
activeMembers,
clusterMemberByIdMap,
thisMember,
consensusPublisher,
ctx,
this);
}
private void idle()
{
checkInterruptStatus();
aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
idleStrategy.idle();
pollArchiveEvents();
}
private void idle(final int workCount)
{
checkInterruptStatus();
aeronClientInvoker.invoke();
if (aeron.isClosed())
{
throw new AgentTerminationException("unexpected Aeron close");
}
idleStrategy.idle(workCount);
if (0 == workCount)
{
pollArchiveEvents();
}
}
private static void checkInterruptStatus()
{
if (Thread.currentThread().isInterrupted())
{
throw new AgentTerminationException("interrupted");
}
}
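    // Record a snapshot of the consensus module state via the archive, await the
    // recording completing, then append snapshot entries for each service and the
    // consensus module to the recording log and refresh the recovery plan.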
private void takeSnapshot(final long timestamp, final long logPosition, final ServiceAck[] serviceAcks)
{
final long recordingId;
try (ExclusivePublication publication = aeron.addExclusivePublication(
ctx.snapshotChannel(), ctx.snapshotStreamId()))
{
final String channel = ChannelUri.addSessionId(ctx.snapshotChannel(), publication.sessionId());
archive.startRecording(channel, ctx.snapshotStreamId(), LOCAL, true);
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, publication.sessionId());
recordingId = RecordingPos.getRecordingId(counters, counterId);
snapshotState(publication, logPosition, replayLeadershipTermId);
awaitRecordingComplete(recordingId, publication.position(), counters, counterId);
}
catch (final ArchiveException ex)
{
if (ex.errorCode() == ArchiveException.STORAGE_SPACE)
{
ctx.countedErrorHandler().onError(ex);
unexpectedTermination();
}
throw ex;
}
final long termBaseLogPosition = recordingLog.getTermEntry(replayLeadershipTermId).termBaseLogPosition;
for (int serviceId = serviceAcks.length - 1; serviceId >= 0; serviceId--)
{
final long snapshotId = serviceAcks[serviceId].relevantId();
recordingLog.appendSnapshot(
snapshotId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, serviceId);
}
recordingLog.appendSnapshot(
recordingId, replayLeadershipTermId, termBaseLogPosition, logPosition, timestamp, SERVICE_ID);
recordingLog.force(ctx.fileSyncLevel());
recoveryPlan = recordingLog.createRecoveryPlan(archive, ctx.serviceCount(), Aeron.NULL_VALUE);
ctx.snapshotCounter().incrementOrdered();
final long nowNs = clusterClock.timeNanos();
for (final ClusterSession session : sessionByIdMap.values())
{
session.timeOfLastActivityNs(nowNs);
}
}
private void awaitRecordingComplete(
final long recordingId, final long position, final CountersReader counters, final int counterId)
{
idleStrategy.reset();
while (counters.getCounterValue(counterId) < position)
{
idle();
if (!RecordingPos.isActive(counters, counterId, recordingId))
{
throw new ClusterException("recording has stopped unexpectedly: " + recordingId);
}
}
}
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
idleStrategy.reset();
int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
while (CountersReader.NULL_COUNTER_ID == counterId)
{
idle();
counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
}
while (0 == counters.getCounterRegistrationId(counterId))
{
idle();
}
return counterId;
}
private void snapshotState(
final ExclusivePublication publication, final long logPosition, final long leadershipTermId)
{
final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
publication, idleStrategy, aeronClientInvoker);
snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion());
snapshotTaker.snapshotConsensusModuleState(
nextSessionId, nextServiceSessionId, logServiceSessionId, pendingServiceMessages.size());
snapshotTaker.snapshotClusterMembers(memberId, highMemberId, activeMembers);
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN || session.state() == CLOSED)
{
snapshotTaker.snapshotSession(session);
}
}
timerService.snapshot(snapshotTaker);
snapshotTaker.snapshot(pendingServiceMessages);
snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0, clusterTimeUnit, ctx.appVersion());
}
private void clusterMemberJoined(final int memberId, final ClusterMember[] newMembers)
{
highMemberId = Math.max(highMemberId, memberId);
final ClusterMember eventMember = ClusterMember.findMember(newMembers, memberId);
if (null != eventMember)
{
if (null == eventMember.publication())
{
ClusterMember.addConsensusPublication(
eventMember, ChannelUri.parse(ctx.consensusChannel()), ctx.consensusStreamId(), aeron);
}
activeMembers = ClusterMember.addMember(activeMembers, eventMember);
clusterMemberByIdMap.put(memberId, eventMember);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
}
}
private void clusterMemberQuit(final int memberId)
{
activeMembers = ClusterMember.removeMember(activeMembers, memberId);
clusterMemberByIdMap.remove(memberId);
rankedPositions = new long[ClusterMember.quorumThreshold(activeMembers.length)];
}
private void onUnavailableIngressImage(final Image image)
{
ingressAdapter.freeSessionBuffer(image.sessionId());
}
private void enqueueServiceSessionMessage(
final MutableDirectBuffer buffer, final int offset, final int length, final long clusterSessionId)
{
final int headerOffset = offset - SessionMessageHeaderDecoder.BLOCK_LENGTH;
final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset();
buffer.putLong(clusterSessionIdOffset, clusterSessionId, SessionMessageHeaderDecoder.BYTE_ORDER);
buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderDecoder.BYTE_ORDER);
if (!pendingServiceMessages.append(buffer, offset - SESSION_HEADER_LENGTH, length + SESSION_HEADER_LENGTH))
{
throw new ClusterException("pending service message buffer capacity: " + pendingServiceMessages.size());
}
}
private boolean serviceSessionMessageAppender(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int headerOffset = offset + MessageHeaderDecoder.ENCODED_LENGTH;
final int clusterSessionIdOffset = headerOffset + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
final int timestampOffset = headerOffset + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long clusterSessionId = buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
final long appendPosition = logPublisher.appendMessage(
leadershipTermId,
clusterSessionId,
clusterClock.time(),
buffer,
offset + SESSION_HEADER_LENGTH,
length - SESSION_HEADER_LENGTH);
if (appendPosition > 0)
{
++uncommittedServiceMessages;
logServiceSessionId = clusterSessionId;
pendingServiceMessageHeadOffset = headOffset;
buffer.putLong(timestampOffset, appendPosition, SessionMessageHeaderEncoder.BYTE_ORDER);
return true;
}
return false;
}
private boolean serviceSessionMessageReset(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int timestampOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
if (appendPosition < Long.MAX_VALUE)
{
buffer.putLong(timestampOffset, Long.MAX_VALUE, SessionMessageHeaderEncoder.BYTE_ORDER);
return true;
}
return false;
}
private boolean leaderServiceSessionMessageSweeper(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int timestampOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.timestampEncodingOffset();
final long appendPosition = buffer.getLong(timestampOffset, SessionMessageHeaderDecoder.BYTE_ORDER);
if (appendPosition <= commitPosition.getWeak())
{
--uncommittedServiceMessages;
return true;
}
return false;
}
private boolean followerServiceSessionMessageSweeper(
final MutableDirectBuffer buffer, final int offset, final int length, final int headOffset)
{
final int clusterSessionIdOffset = offset +
MessageHeaderDecoder.ENCODED_LENGTH + SessionMessageHeaderDecoder.clusterSessionIdEncodingOffset();
return buffer.getLong(clusterSessionIdOffset, SessionMessageHeaderDecoder.BYTE_ORDER) <= logServiceSessionId;
}
private void onUnavailableCounter(final CountersReader counters, final long registrationId, final int counterId)
{
if (ConsensusModule.State.TERMINATING != state && ConsensusModule.State.QUITTING != state)
{
for (final long clientId : serviceClientIds)
{
if (registrationId == clientId)
{
ctx.countedErrorHandler().onError(new ClusterException(
"Aeron client in service closed unexpectedly", WARN));
state(ConsensusModule.State.CLOSED);
return;
}
}
if (null != appendPosition && appendPosition.registrationId() == registrationId)
{
appendPosition = null;
logSubscriptionId = NULL_VALUE;
if (null != election)
{
election.handleError(
clusterClock.timeNanos(), new ClusterException("log recording ended unexpectedly", WARN));
}
else if (NULL_POSITION == terminationPosition)
{
ctx.countedErrorHandler().onError(new ClusterException("log recording ended unexpectedly", WARN));
enterElection();
}
}
}
}
private void closeAndTerminate()
{
tryStopLogRecording();
state(ConsensusModule.State.CLOSED);
terminateAgent();
}
private void unexpectedTermination()
{
aeron.removeUnavailableCounterHandler(unavailableCounterHandlerRegistrationId);
serviceProxy.terminationPosition(0, ctx.countedErrorHandler());
tryStopLogRecording();
state(ConsensusModule.State.CLOSED);
terminateAgent();
}
private void terminateAgent()
{
try
{
ctx.terminationHook().run();
}
catch (final Throwable ex)
{
ctx.countedErrorHandler().onError(ex);
}
throw new ClusterTerminationException();
}
private void tryStopLogRecording()
{
appendPosition = null;
if (NULL_VALUE != logSubscriptionId && archive.archiveProxy().publication().isConnected())
{
try
{
archive.tryStopRecording(logSubscriptionId);
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
}
logSubscriptionId = NULL_VALUE;
}
if (NULL_VALUE != logRecordingId)
{
try
{
archive.tryStopRecordingByIdentity(logRecordingId);
}
catch (final Exception ex)
{
ctx.countedErrorHandler().onError(new ClusterException(ex, WARN));
}
}
}
private long getLastAppendedPosition()
{
idleStrategy.reset();
while (true)
{
final long appendPosition = archive.getStopPosition(logRecordingId);
if (NULL_POSITION != appendPosition)
{
return appendPosition;
}
idle();
}
}
private void appendDynamicJoinTermAndSnapshots()
{
if (!dynamicJoinSnapshots.isEmpty())
{
final RecordingLog.Snapshot lastSnapshot = dynamicJoinSnapshots.get(dynamicJoinSnapshots.size() - 1);
recordingLog.appendTerm(
logRecordingId,
lastSnapshot.leadershipTermId,
lastSnapshot.termBaseLogPosition,
lastSnapshot.timestamp);
for (int i = dynamicJoinSnapshots.size() - 1; i >= 0; i--)
{
final RecordingLog.Snapshot snapshot = dynamicJoinSnapshots.get(i);
recordingLog.appendSnapshot(
snapshot.recordingId,
snapshot.leadershipTermId,
snapshot.termBaseLogPosition,
snapshot.logPosition,
snapshot.timestamp,
snapshot.serviceId);
}
dynamicJoinSnapshots.clear();
}
}
private void connectIngress()
{
if (!ctx.ingressChannel().contains(ENDPOINT_PARAM_NAME))
{
final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.ingressEndpoint());
ingressAdapter.connect(aeron.addSubscription(
ingressUri.toString(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
}
else if (Cluster.Role.LEADER == role)
{
ingressAdapter.connect(aeron.addSubscription(
ctx.ingressChannel(), ctx.ingressStreamId(), null, this::onUnavailableIngressImage));
}
}
public String toString()
{
return "ConsensusModuleAgent{" +
"election=" + election +
'}';
}
}
| [Java] Await registration id before constructing ReadableCounter when creating the append position.
| aeron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java | [Java] Await registration id before constructing ReadableCounter when creating the append position. | <ide><path>eron-cluster/src/main/java/io/aeron/cluster/ConsensusModuleAgent.java
<ide> {
<ide> final CountersReader counters = aeron.countersReader();
<ide> final int counterId = awaitRecordingCounter(counters, logSessionId);
<del> final long registrationId = counters.getCounterRegistrationId(counterId);
<del>
<add>
<add> long registrationId;
<add> while (0 == (registrationId = counters.getCounterRegistrationId(counterId)))
<add> {
<add> idle();
<add> }
<add>
<add> logRecordingId = RecordingPos.getRecordingId(counters, counterId);
<ide> appendPosition = new ReadableCounter(counters, registrationId, counterId);
<ide> logRecordedPosition = NULL_POSITION;
<del> if (NULL_VALUE == logRecordingId)
<del> {
<del> logRecordingId = RecordingPos.getRecordingId(counters, counterId);
<del> }
<ide> }
<ide>
<ide> private void loadSnapshot(final RecordingLog.Snapshot snapshot, final AeronArchive archive)
<ide> counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
<ide> }
<ide>
<del> while (0 == counters.getCounterRegistrationId(counterId))
<del> {
<del> idle();
<del> }
<del>
<ide> return counterId;
<ide> }
<ide> |
|
JavaScript | mit | 59c004e0791ae6696b85f87bd3c6db361ab708ff | 0 | bigeasy/strata,bigeasy/strata | // **TODO** Need to ensure partition does not result in endless balance loops.
// **TODO** Vacuum delete-heavy pages.
// **TODO** Really for write-ahead, append the block list so you can read it
// quickly and deserialize it quickly. You can write each block list at a
// position in the log and write out a map to the position of the block list
// indexed by key. This takes advantage of scatter-read over sequential write
// that is now in vogue.
'use strict'
// Sort function generator.
const ascension = require('ascension')
// Comparator decorator that extracts the sorted fields from an object.
const whittle = require('whittle')
// Node.js API.
const assert = require('assert')
const path = require('path')
const fileSystem = require('fs')
const fs = require('fs').promises
// Return the first non null-like value.
const { coalesce } = require('extant')
// An `async`/`await` work queue.
const Turnstile = require('turnstile')
// Journaled file system operations for tree rebalancing.
const Journalist = require('journalist')
// A pausable service work queue that shares a common application work queue.
const Fracture = require('fracture')
// A non-crypographic (fast) 32-bit hash for record integrity.
// const fnv = require('./fnv')
// Serialize a single b-tree record.
const Recorder = require('transcript/recorder')
// Incrementally read a b-tree page chunk by chunk.
const Player = require('transcript/player')
// Catch nested exceptions by type, message and properties.
const rescue = require('rescue')
// A Promise wrapper that captures `resolve` and `reject`.
const Future = require('perhaps')
// Binary search for a record in a b-tree page.
const find = require('./find')
// Partition a leaf page according to user's desired groupings.
const Partition = require('./partition')
// An `Error` type specific to Strata.
const Strata = { Error: require('./error') }
//
// Sheaf is the crux of Strata. It exists as a separate object possibly for
// legacy reasons, and it will stay that way because it makes `Strata` and
// `Cursor` something a user can read to understand the interface.
//
class Sheaf {
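    // Normalize the user options: default the leaf and branch split and merge
    // thresholds and expand the comparator option into the `{ leaf, branch, zero }`
    // form used internally.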
static options (options) {
const leaf = coalesce(options.leaf, {})
options.leaf = {
split: coalesce(leaf.split, 5),
merge: coalesce(leaf.merge, 1)
}
const branch = coalesce(options.branch, {})
options.branch = {
split: coalesce(branch.split, 5),
merge: coalesce(branch.merge, 1)
}
options.comparator = function () {
const zero = object => object
if (options.comparator == null) {
const comparator = whittle(ascension([ String ]), value => [ value ])
return { leaf: comparator, branch: comparator, zero }
} else if (typeof options.comparator == 'function') {
return { leaf: options.comparator, branch: options.comparator, zero }
} else {
return options.comparator
}
} ()
return options
}
// Sheaf accepts the destructible and user options passed to `new Strata`
constructor (destructible, options) {
Strata.Error.assert(options.turnstile != null, 'OPTION_REQUIRED', { _option: 'turnstile' })
this.options = Sheaf.options(options)
this.pages = options.pages
this.comparator = options.comparator
this.leaf = options.leaf
this.branch = options.branch
this.storage = options.storage
this.storage.deferrable.increment()
this._id = 0
this._root = this._create({ id: -1, leaf: false, items: [{ id: '0.0' }] }, [])
this._root.cartridge.heft = 1
this.destructible = destructible
this.deferrable = destructible.durable($ => $(), { countdown: 1 }, 'deferrable')
// **TODO** Do not worry about wrapping anymore.
// Operation id wraps at 32-bits, cursors should not be open that long.
this._operationId = 0xffffffff
// Concurrency and work queues. One keyed queue for page writes, the
// other queue will only use a single key for all housekeeping.
this._fracture = new Fracture(destructible.durable($ => $(), 'appender'), {
turnstile: options.turnstile,
value: key => {
switch (key) {
case 'keephouse':
return { candidates: [] }
default:
return {
id: this._operationId = (this._operationId + 1 & 0xffffffff) >>> 0,
writes: [],
cartridge: this.pages.hold(key),
future: new Future
}
}
},
worker: this._fractured.bind(this),
cancel: ({ key, value }) => {
if (key == 'append') {
value.cartridge.release()
}
}
})
this._fracture.deferrable.increment()
// **TODO** Not yet used, would `mkdir` any pages that need to be
// inspected for balance.
this._canceled = new Set
destructible.destruct(() => this.deferrable.decrement())
// This used to remove the root page but we have a race with reads which
// are not tracked with Destructible nor Fracture so we have no way to
// wait for reads to drain and we've always imagined that maybe reads
// could be considered entirely independent of writes, continuing after
// write failure, or maybe having a read-only tree, so we don't fight
// this one. In applications we can assert that the cache is zero after
// everything that could be reading has shut down.
this.deferrable.destruct(() => {
this.deferrable.ephemeral('shutdown', async () => {
// Trying to figure out how to wait for the Turnstile to drain.
// We can't terminate the housekeeping turnstile then the
// acceptor turnstile because they depend on each other, so
// we're going to loop. We wait for one to drain, then the
// other, then check to see if anything is in the queues to
// determine if we can leave the loop. Actually, we only need to
// check the size of the first queue in the loop, the second
// will be empty when `drain` returns.
//
// **TODO** Really want to just push keys into a file for
// inspection when we reopen for housekeeping.
//
// **TODO** This is why we finally block cartridge release. We get
// remove error here if we don't.
await this.drain()
this.track = true
this._fracture.deferrable.decrement()
this.storage.deferrable.decrement()
if (this._root != null) {
this._root.cartridge.release()
this._root = null
}
})
})
}
read (id) {
return this.storage.read(id)
}
//
// We load the page then check for a race after we've loaded. If a different
// strand beat us to it, we just ignore the result of our read and return
// the cached entry.
//
async load (id) {
const { page, heft } = await this.read(id)
const entry = this.pages.hold(id, null)
if (entry.value == null) {
entry.value = page
entry.heft = heft
}
return entry
}
_create (page, cartridges) {
const cartridge = this.pages.hold(page.id, page)
cartridges.push(cartridge)
return { page: cartridge.value, cartridge }
}
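    // Synchronous descent through the cached pages. Returns a descent object with
    // the landing index, the pivot key and the right sibling key gathered on the
    // way down, `{ miss: id }` if a page must be loaded from storage, or `null`
    // when an exact fork was requested but no pivot matched the key.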
_descend (entries, { key, level = -1, fork = false, rightward = false, approximate = false }) {
const descent = { miss: null, keyed: null, level: 0, index: 0, entry: null,
pivot: null,
cartridge: null,
page: null
}
let entry = null
entries.push(entry = this.pages.hold(-1))
for (;;) {
// When you go rightward at the outset or fork you might hit this
// twice, but it won't matter because you're not going to use the
// pivot anyway.
//
// You'll struggle to remember this, but it is true...
//
if (descent.index != 0) {
//
// The last key we visit is the key for the leaf page for whatever
// level we stop at. This holds true even if we fork. We hold onto
// the previous pivot and if the left fork is not not the zero index
// of the branch page, then the previous pivot is the key for the
// leaf of the fork. Note that for balancing, we only fork when we
// match the exact key in a branch. We have an approximate fork for
// the user in case we eliminate the leaf page with a merge, they
// will land in the merged page at the first index less than the
// key. The right key tracking will also be correct since we will
// immediately pick up a right key when we leave this block.
//
const pivot = descent.pivot
descent.pivot = {
key: entry.value.items[descent.index].key,
level: descent.level - 1
}
//
// If we're trying to find siblings we're using an exact key that is
// definitely above the level sought, we'll see it and then go left
// or right if there is a branch in that direction.
//
// Earlier I had this at KILLROY below. And I adjust the level, but
// I don't reference the level, so it's probably fine here.
//
if (this.comparator.branch(descent.pivot.key, key) == 0 && fork) {
descent.index--
rightward = true
descent.pivot = descent.index != 0
? { key: entry.value.items[descent.index].key, level: descent.level - 1 }
: pivot
}
}
// You don't fork right. You can track the rightward key though.
if (descent.index + 1 < entry.value.items.length) {
descent.right = entry.value.items[descent.index + 1].key
}
// We exit at the leaf, so this will always be a branch page.
const id = entry.value.items[descent.index].id
// Attempt to hold the page from the cache, return the id of the
// page if we have a cache miss.
entry = this.pages.hold(id)
if (entry == null) {
return { miss: id }
}
entries.push(entry)
// TODO Move this down below the leaf return and do not search if
// we are searching for a leaf.
// Binary search the page for the key, or just go right or left
// directly if there is no key.
const offset = entry.value.leaf ? 0 : 1
const index = rightward
? entry.value.leaf ? ~(entry.value.items.length - 1) : entry.value.items.length - 1
: key != null
? find(this.comparator.leaf, entry.value.items, key, offset)
: entry.value.leaf ? ~0 : 0
//
// If the page is a leaf, assert that we're looking for a leaf and
// return the leaf page.
//
if (entry.value.leaf) {
descent.found = index >= 0
descent.index = index < 0 ? ~index : index
assert.equal(level, -1, 'could not find branch')
break
}
//
// If the index is less than zero we didn't find the exact key, so
// we're looking at the bitwise not of the insertion point which is
// right after the branch we're supposed to descend, so back it up
// one.
//
descent.index = index < 0 ? ~index - 1 : index
// We're trying to reach branch and we've hit the level.
if (level == descent.level) {
break
}
// KILLROY was here.
descent.level++
}
//
// **TODO** What happens when we merge a leaf page so that the key is
// gone and then we delete all the values before the key? Essentially,
// what is the effect of searching for a key that is not a leaf key
// whose value is greater than the leaf key it lands on and less than
// the least value in the page? We can test this without branch races.
// If it is `-1` that's fine. You're not supposed to fork to find an
// insert location. I believe `-1` is a stop for reverse iteration.
// Write a test and come back and document this with more confidence.
//
if (fork && !rightward) {
if (approximate) {
descent.index--
descent.found = false
} else {
return null
}
}
return descent
}
//
// We hold onto the entries array for the descent to prevent the unlikely
// race condition where we cannot descend because we have to load a page,
// but while we're loading a page another page in the descent unloads.
//
// Conceivably, this could continue indefinitely.
//
async descend (query, callerEntries, internal = true) {
const entries = [[]]
for (;;) {
entries.push([])
const descent = this._descend(entries[1], query)
entries.shift().forEach(entry => entry.release())
if (descent == null) {
entries.shift().forEach((entry) => entry.release())
return null
}
if (descent.miss == null) {
callerEntries.push(descent.entry = entries[0].pop())
entries.shift().forEach(entry => entry.release())
descent.cartridge = descent.entry
descent.page = descent.cartridge.value
return descent
}
const entry = await this.load(descent.miss)
entries[0].push(entry)
}
}
//
// You keep trying to make the catch block a finally block but then notice
    // that the release of the entries is conditional on not missing. That is,
// if you miss you don't want to release the entries, but a finally block
// always releases the entries.
//
descend2 (trampoline, query, found) {
this.deferrable.operational()
const entries = []
try {
const descent = this._descend(entries, query)
if (descent.miss) {
trampoline.promised(async () => {
try {
entries.push(await this.deferrable.destructive('load', this.load(descent.miss)))
this.descend2(trampoline, query, found)
} finally {
entries.forEach(entry => entry.release())
}
})
} else {
if (descent != null) {
descent.entry = entries.pop()
}
entries.forEach(entry => entry.release())
entries.length = 0
found(descent)
}
} catch (error) {
entries.forEach(entry => entry.release())
throw error
}
}
// Writes appear to be able to run with impunity. What was the logic there?
// Something about the leaf being written to synchronously, but if it was
// asynchronous, then it is on the user to assert that the page has not
// changed.
//
// The block will wait on a promise release preventing any of the writes
// from writing.
//
// Keep in mind that there is only one housekeeper, so that might factor
// into the logic here.
//
// Can't see what's preventing writes from becoming stale. Do I ensure that
// they are written before the split? Must be.
//
async _append (canceled, key, { writes, cartridge, future }) {
await this.deferrable.copacetic($ => $(), 'append', null, async () => {
try {
this.deferrable.progress()
const page = cartridge.value
if (
(
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= this.leaf.merge
)
) {
this._fracture.enqueue('keephouse').value.candidates.push(page.key || page.items[0].key)
}
await this.storage.writeLeaf(page, writes)
} finally {
cartridge.release()
future.resolve()
}
})
}
append (id, buffer, writes) {
this.deferrable.operational()
const append = this._fracture.enqueue(id)
append.value.writes.push(buffer)
writes.add(append.future)
}
drain () {
return this._fracture.drain()
}
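    // A leaf is unbalanced if it reached the split threshold and its keys are not
    // all equal under the branch comparator, or if it shrank to the merge
    // threshold and is not the lone left-most leaf. A branch is unbalanced on its
    // own split and merge thresholds, the root when reduced to a single branch child.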
_unbalanced (page) {
return page.leaf
? (
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= this.leaf.merge
)
: (
page.items.length >= this.branch.split
)
||
(
page.id == '0.0'
? +page.items[0].id.split('.')[1] % 2 == 0 && page.items.length == 1
: page.items.length <= this.branch.merge
)
}
_balanceIf (branch, messages, message) {
if (this._unbalanced(branch.page)) {
messages.push(message)
}
}
// Assume there is nothing to block or worry about with the branch pages.
// Can't recall at the moment, though. Descents are all synchronous.
//
// You've come back to this and it really bothers you that these slices are
// performed twice, once in the journalist and once in the commit. You
// probably want to let this go for now until you can see clearly how you
// might go about eliminating this duplication. Perhaps the commit uses the
// journalist to descend, lock, etc. just as the Cursor does. Or maybe the
// Journalist is just a Sheaf of pages, which does perform the leaf write,
// but defers to the Commit, now called a Journalist, to do the splits.
//
// It is not the case that the cached information is in some format that is
// not ready for serialization. What do we get exactly? What we'll see at
// first is that these two are calling each other a lot, so we're going to
// probably want to move more logic back over to Commit, including leaf
// splits. It will make us doubt that we could ever turn this easily into an
// R*Tree but the better the architecture, the easier it will be to extract
// components for reuse as modules, as opposed to making this into some sort
// of pluggable framework.
//
// Maybe it just came to me. Why am I logging `drain`, `fill`, etc? The
// commit should just expose `emplace` and the journalist can do the split
// and generate the pages and then the Commit is just journaled file system
// operations. It won't even update the heft, it will just return the new
// heft and maybe it doesn't do the page reads either.
//
// We'd only be duplicating the splices, really.
//
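    // Drain an over-full root by moving its items into two new child branches,
    // leaving the root with exactly two entries and growing the tree by a level.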
async _drainRoot (messages, cartridges) {
const root = await this.descend({ key: null, level: 0 }, cartridges)
const partition = Math.floor(root.entry.value.items.length / 2)
const left = this._create({
id: this.storage.nextId(false),
offset: 1,
items: root.page.items.slice(0, partition),
hash: null,
stop: 0
}, cartridges)
const right = this._create({
id: this.storage.nextId(false),
offset: 1,
items: root.page.items.slice(partition),
hash: null,
stop: 0
}, cartridges)
root.page.items = [{
id: left.page.id,
key: null,
heft: left.page.items[0].heft
}, {
id: right.page.id,
key: right.page.items[0].key,
heft: left.page.items[0].heft
}]
right.page.items[0].key = null
right.page.items[0].heft = left.page.items[0].heft
messages.forEach(message => message.level++)
this._balanceIf(left, messages, { method: 'balance', key: null, level: 1 })
this._balanceIf(right, messages, { method: 'balance', key: root.page.items[1].key, level: 1 })
await this.storage.writeDrainRoot({ left, right, root })
}
async balance (key, level, messages, cartridges) {
const branch = await this.descend({ key, level }, cartridges)
const leaves = +branch.page.items[0].id.split('.')[1] % 2 == 1
if (branch.page.items.length >= this.branch.split) {
if (branch.page.id == '0.0') {
await this._drainRoot(messages, cartridges)
} else {
await this._splitBranch(key, level, messages, cartridges)
}
} else if (branch.page.items.length <= this.branch.merge) {
if (branch.page.id != '0.0') {
const merger = await this._selectMerger(key, branch, cartridges)
await this._mergeBranch(merger, messages, cartridges)
} else if (! leaves && branch.page.items.length == 1) {
await this._fillRoot(messages, cartridges)
}
}
}
async _splitBranch (key, level, messages, cartridges) {
const left = await this.descend({ key, level }, cartridges)
const parent = await this.descend({ key, level: level - 1 }, cartridges)
const partition = Math.floor(left.page.items.length / 2)
const right = this._create({
id: this.storage.nextId(false),
items: left.page.items.splice(partition),
leaf: false,
stop: 0
}, cartridges)
const promotion = right.page.items[0].key
right.page.items[0].key = null
left.page.items = left.page.items.splice(0, partition)
parent.page.items.splice(parent.index + 1, 0, {
key: promotion,
id: right.page.id,
heft: parent.page.items[parent.page.items.length - 1].heft
})
this._balanceIf(left, messages, { method: 'balance', key: key, level: level })
this._balanceIf(right, messages, { method: 'balance', key: promotion, level: level })
this._balanceIf(parent, messages, { method: 'balance', key: key, level: level - 1 })
await this.storage.writeSplitBranch({ promotion, left, right, parent })
}
//
// **TODO** This is what we'll call a vacuum for the sake of removing delete
// messages.
//
async _rotate () {
}
//
// Split leaf. We always split a new page off to the right. Because we
// always merge two pages together into the left page our left-most page id
// will never change, it will always be `0.1`.
//
// Split is performed by creating two new stub append log. One for the
// existing page which is now the left page and one for the new right page.
// When either of these pages loads they will load the old existing page,
// then split the page and continue with new records added to the subsequent
// append log.
//
async _splitLeaf (pause, key, left, cartridges, displace) {
// Descend to the parent branch page.
const parent = await this.descend({ key, level: left.level - 1 }, cartridges)
// Create the right page now so we can lock it. We're going to
// synchronously add it to the tree and then do the housekeeping to
// persist the split asynchronously. While we're async, someone could
// descend the tree and start writing. In fact, this is very likely to
// happen during a batch insert by the user.
const right = this._create({
id: this.storage.nextId(true),
leaf: true,
items: [],
right: null,
dependents: {},
key: null,
log: null,
stop: 0
}, cartridges)
const messages = []
// Create our journaled tree alterations.
const pauses = []
try {
pauses.push(await pause(left.page.id))
pauses.push(await pause(right.page.id))
// Race is the wrong word, it's our synchronous time. We have to split
// the page and then write them out. Anyone writing to this leaf has to
// to be able to see the split so that they surrender their cursor if
// their insert or delete belongs in the new page, not the old one.
//
// Notice that all the page manipulation takes place before the first
// write. Recall that the page manipulation is done to the page in
            // memory which is official, the page writes are lagging.
// Split page creating a right page.
const length = left.page.items.length
const partition = Partition(this.comparator.branch, left.page.items)
// If we cannot partition because the leaf and branch have different
// partition comparators and the branch comparator considers all keys
// identical, we give up and return. We will have gone through the
// housekeeping queue to get here, and if the user keeps inserting keys
// that are identical according to the branch comparator, we'll keep
// making our futile attempts to split. Currently, though, we're only
// going to see this behavior in Amalgamate when someone is staging an
// update to the same key, say inserting it and deleting it over and
// over, and then if they are doing it as part of transaction, we'd only
// attempt once for each batch of writes. We could test the partition
// before the entry into the housekeeping queue but then we have a
// racing unit test to write to get this branch to execute, so I won't
// bother until someone actually complains. It would mean a stage with
// 100s of updates to one key that occur before the stage can merge
            // before we start to hit this early exit.
if (partition == null) {
cartridges.forEach(cartridge => cartridge.release())
right.cartridge.remove()
return
}
const items = left.page.items.splice(partition)
right.page.key = this.comparator.zero(items[0].key)
right.page.items = items
right.page.right = left.page.right
right.cartridge.heft = items.reduce((sum, item) => sum + item.heft, 1)
// Set the right key of the left page.
left.page.right = right.page.key
left.cartridge.heft -= right.cartridge.heft - 1
// Set the heft of the left page and entry. Moved this down.
// child.entry.heft -= heft - 1
// Insert a reference to the right page in the parent branch page.
// Use an approximate heft for writeahead only storage, recalculated
// for file system storage.
//
parent.page.items.splice(parent.index + 1, 0, {
key: right.page.key,
id: right.page.id,
heft: parent.page.items[parent.page.items.length - 1].heft
})
            // If either of the pages is still larger than the split threshold, check
// the split again.
for (const page of [ left.page, right.page ]) {
if (
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
) {
this._fracture.enqueue('keephouse').value.candidates.push(page.key || page.items[0].key)
}
}
//
// Write any queued writes, they would have been in memory, in the page
// that was split above. We based our split on these writes.
//
const writes = []
for (const entries of pauses[0].entries) {
writes.push.apply(writes, entries.writes.splice(0))
}
writes.push.apply(writes)
//
this._balanceIf(parent, messages, { method: 'balance', key: key, level: parent.level })
// Once we await our synchronous operations are over. The user can
// append new writes to the existing queue entry.
//
// All user operations are synchronous, operating on a page after a
// synchronous descent with no async operations allowed while they
// hold the page. This means we do not have to worry about splitting
// a page out from under them.
//
// Thus, the first asynchronous action is a synchronous lock release
// of a sort, the user can now change the page in memory. We have
// still paused all writes to both the left and right pages and we
// are in a hurry to release that lock.
//
await this.storage.writeSplitLeaf({ key, left, right, parent, writes, messages })
//
} finally {
//
// **TODO** We probably don't want to release our locks, it just
// means that work proceeds in some fashion that causes problems,
// and how will our appender strand know that this strand is in a
// bad way? Can we have an errored flag on the destructible?
// We can resume writing. Everything else is going to happen to log
            // files which are not write contended.
//
pauses.forEach(pause => pause.resume())
//
}
//
// We run this function to continue balancing the tree.
//
await this.storage.balance(this, displace)
}
    // **TODO** Something is wrong here. We're using `child.right` to find the
    // right branch page but the leaf and its right sibling can always be
// under the same branch. How do we really go right?
//
// **TODO** The above is a major problem. This is super broken. We may end
// up merging a page into nothing.
//
// **TODO** Regarding the above. Stop and think about it and you can see
// that you can always pick up the right key of the page at a particular
// level as you descend the tree. On the way down, update a right variable
// with the id of the page for the node to the right of the node you
// followed if one exists. If the page you followed is at the end of the
// array do not update it. Wait... Is that what `child.right` is here? Heh.
    // It might well be. I see I am tracking right as I descend.
//
// **TODO** LOL at all that above and if you're smarter when you wrote the
// code than when you wrote these comments, rewrite all this into a
// description so you don't do this again.
//
async _selectMerger (key, child, entries) {
const level = child.entry.value.leaf ? -1 : child.level
const left = await this.descend({ key, level, fork: true }, entries)
const right = child.right == null
? null
: await this.descend({ key: child.right, level }, entries)
const mergers = []
if (left != null) {
mergers.push({
items: left.entry.value.items,
key: child.entry.value.key || child.entry.value.items[0].key,
level: level
})
}
if (right != null) {
mergers.push({
items: right.entry.value.items,
count: right.entry.value.items.length,
key: child.right,
level: level
})
}
return mergers
.filter(merger => this.comparator.branch(merger.items[0].key, merger.items[merger.items.length - 1].key) != 0)
.sort((left, right) => left.items.length - right.items.length)
.shift()
}
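    //
    // A leaf page is "dirty" when it is large enough to split and its keys are
    // not all identical, or when it is small enough to merge and it is not the
    // lone root leaf (id '0.1' with no right sibling).
    //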
_isDirty (page, sizes) {
return (
page.items.length >= sizes.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= sizes.merge
)
}
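    //
    // Plan the branch-page edits needed when a page is merged away: collect
    // any single-entry ancestors that must be deleted and, if needed, a
    // replacement key to promote into the page we will splice.
    //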
async _surgery (right, pivot) {
const surgery = {
deletions: [],
replacement: null,
splice: pivot
}
// If the pivot is somewhere above we need to promote a key, unless all
// the branches happen to be single entry branches.
if (right.level - 1 != pivot.level) {
let level = right.level - 1
do {
const ancestor = this.descend({ key, level }, entries)
if (ancestor.page.items.length == 1) {
surgery.deletions.push(ancestor)
} else {
// **TODO** Also null out after splice.
assert.equal(ancestor.index, 0, 'unexpected ancestor')
surgery.replacement = ancestor.page.items[1].key
surgery.splice = ancestor
}
level--
} while (surgery.replacement == null && level != right.pivot.level)
}
return surgery
}
// **TODO** The way that I'm journaling these balances, I need to ensure
// that I am not journaling a page that will be deleted. Something like
// right then left then parent, because if we go left to right the left page
// may choose to merge with its right sibling deleting it. If the right page
    // chooses to merge with the left sibling it will delete itself. No, no.
// We're going by keys, so we're not going to load a deleted page. But, the
    // descent logic depends on navigating by the least key in the branch page,
// so we need to be sure to check that we hit the correct key.
// Easiest way to keep from having a bunch of tests we have to hit..
// We check as to whether or not to add the merge, so we're not building up
// a great big list, just... If we are going to try to merge this page
// again, we will check the parent after we merge again. We have to move
// merge selection into branch merge so that if we can't merge, we still
// check the parent. For split we can always check the parent and then the
// left and right we are only ever adding pages. What about the case where
// split and then possibly merge? We should see if we shouldn't spam the
    // balance queue and then see if we don't luck out and hit the cancel
// condition.
// Fill root will delete a child. Plus, we have an ever growing list of
    // possible balance operations so we have to think about what is already in
// the list.
//
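    // Fill root: when the root is down to a single branch child, pull the
    // child's items up into the root so the tree shrinks by one level.
    //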
async _fillRoot (messages, cartridges) {
const root = await this.descend({ key: null, level: 0 }, cartridges)
const child = await this.descend({ key: null, level: 1 }, cartridges)
root.page.items = child.page.items
messages.forEach(message => message.level--)
await this.storage.writeFillRoot({ root, child, messages })
}
async _mergeBranch ({ key, level }, messages, cartridges) {
// **TODO** We don't have to worry. If we go right first, it will have a
// pivot and if so it has a left, if not it has no left. EXCEPT we just
// got this from merger selection so we know it is good, what is going
// on in merger selection?
const left = await this.descend({ key, level, fork: true }, cartridges)
const right = await this.descend({ key, level }, cartridges)
const pivot = await this.descend(right.pivot, cartridges)
const surgery = await this._surgery(right, pivot)
right.page.items[0].key = key
left.page.items.push.apply(left.page.items, right.page.items)
// Replace the key of the pivot if necessary.
if (surgery.replacement != null) {
pivot.page.items[pivot.index].key = surgery.replacement
}
// Remove the branch page that references the leaf page.
surgery.splice.page.items.splice(surgery.splice.index, 1)
// If the splice index was zero, null the key of the new left most branch.
if (surgery.splice.index == 0) {
surgery.splice.page.items[0].key = null
}
//
// **TODO** This needs to be tested. With some confidence in the pivot
// logic I'm going to use the pivot of the left and the splice to find
        // them. The key for a branch page that is not the right-most path is
// always going to be the pivot.
//
if (left.pivot == null) {
this._balanceIf(left, messages, { method: 'balance', key: null, level: level })
} else {
this._balanceIf(left, messages, { method: 'balance', key: left.pivot.key, level: level })
}
if (surgery.splice.pivot == null) {
this._balanceIf(surgery.splice, messages, { method: 'balance', key: null, level: surgery.splice.level })
} else {
this._balanceIf(surgery.splice, messages, { method: 'balance', key: surgery.splice.pivot.key, level: surgery.splice.level })
}
await this.storage.writeMergeBranch({ key, left, right, pivot, surgery })
}
//
    // The thing is this. Whenever I fiddle around seriously with this code, I'll
// introduce a bug, I mean, just while editing I'll hit "x" in `vim` and
// delete a character, and when I run the test I'll get all kinds of evil.
// What I'm finding now is that there will be infinite loops when I release
// the pause in the finally block, but the pause enqueues a new entry when
// you resume it and then the appender sees that the page needs to merge so
// we come back here. This is only for a programmer error while editing.
// In practice, though, if there is a failure to write the journal, how do
// we proceed? Really leaning heavy on leaving the queue paused. The user
// will know the writes didn't finish, ah, no they won't.
// Might release the cartridges, but generally feel like we should leave
// the...
// Okay, here is where we could start to use the shutdown behavior. We might
// have a directory and anything that is dirty, we mkdir the name of the
// dirty page, so we continue to flush, but we stop balancing. Let's do
// this.
//
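    // Merge leaf: pause writes to both pages, append the right page's items to
    // the left page, adjust the parent through surgery, then journal the merge.
    //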
async _mergeLeaf (pause, { key, level }, cartridges, displace) {
const left = await this.descend({ key, level, fork: true }, cartridges)
const right = await this.descend({ key, level }, cartridges)
const pivot = await this.descend(right.pivot, cartridges)
const surgery = await this._surgery(right, pivot)
const messages = [{ method: 'balance', key: key, level: surgery.splice.level }]
const pauses = []
try {
pauses.push(await pause(left.page.id))
pauses.push(await pause(right.page.id))
// Add the items in the right page to the end of the left page.
const items = left.page.items
const merged = right.page.items.splice(0)
items.push.apply(items, merged)
// Set right reference of left page.
left.page.right = right.page.right
// Adjust heft of left entry.
left.cartridge.heft += right.cartridge.heft - 1
// TODO Remove after a while, used only for assertion in `Cache`.
right.cartridge.heft -= merged.reduce((sum, item) => sum + item.heft, 0)
// Mark the right page deleted, it will cause `indexOf` in the `Cursor`
// to return `null` indicating that the user must release the `Cursor`
// and descend again.
// **TODO** No longer necessary, right?
right.page.deleted = true
// See if the merged page needs to split or merge further.
if (this._isDirty(left.page, this.leaf)) {
                this._fracture.enqueue('keephouse').value.candidates.push(left.entry.value.items[0].key)
}
// Replace the key of the pivot if necessary.
if (surgery.replacement != null) {
pivot.page.items[pivot.index].key = surgery.replacement
}
// Remove the branch page that references the leaf page.
surgery.splice.page.items.splice(surgery.splice.index, 1)
if (surgery.splice.index == 0) {
surgery.splice.page.items[0].key = null
}
//
// Because user updates are synchronous from descent when we go
// async any user writes will go to our new merged page. We do need
// to write the existing writes before we perform our merge.
//
const writes = { left: [], right: [] }
for (const entry of pauses[0].entries) {
writes.left.push.apply(writes.left, entry.writes.splice(0))
}
for (const entry of pauses[1].entries) {
writes.right.push.apply(writes.right, entry.writes.splice(0))
}
//
await this.storage.writeMergeLeaf({ left, right, surgery, pivot, writes, messages })
//
} finally {
pauses.forEach(pause => pause.resume())
}
await this.storage.balance(this, displace)
}
//
// **TODO** `copacetic` could go like this...
// We do a flat iteration of the tree from `0.1` following the right page.
// We first go through every directory and ensure that there is no directory
// named `seen` deleting it if it exists.
// Then we iterate and mark as `seen` every directory we visit.
// When we visit we assert that the page is correctly sorted. We then return
// the items to the user so the user can examine the entries.
// We then look for any directories that are unseen and assert that they are
// `merged` files.
// We can then iterate through the pages again and vacuum all pages that
    // need to be vacuumed. We now assert that the pages are vacuumed
// correctly, nowhere does a page reference a page outside of its own
// directory.
// Then we can look at the unseen pages and see if any of them reference any
// of the `merged` files. If not we can delete the merged files.
//
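    // Housekeeping: for each candidate key, split the leaf if it has grown past
    // the split size, or merge it with a sibling if it has shrunk to the merge
    // size.
    //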
async _keephouse (pause, canceled, { candidates }, displace) {
await this.deferrable.copacetic($ => $(), 'append', null, async () => {
this.deferrable.progress()
if (canceled) {
candidates.forEach(candidate => this._canceled.add(candidate))
} else {
for (const key of candidates) {
const cartridges = []
try {
const child = await this.descend({ key }, cartridges)
if (child.entry.value.items.length >= this.leaf.split) {
await this._splitLeaf(pause, key, child, cartridges, displace)
} else if (
! (
child.entry.value.id == '0.1' && child.entry.value.right == null
) &&
child.entry.value.items.length <= this.leaf.merge
) {
const merger = await this._selectMerger(key, child, cartridges)
if (merger != null) {
await this._mergeLeaf(pause, merger, cartridges, displace)
}
}
} finally {
cartridges.forEach(cartridge => cartridge.release())
}
}
}
})
}
_fractured ({ pause, canceled, key, value, displace }) {
switch (key) {
case 'keephouse':
return this._keephouse(pause, canceled, value, displace)
default:
return this._append(canceled, key, value)
}
}
}
module.exports = Sheaf
| sheaf.js | // **TODO** Need to ensure partition does not result in endless balance loops.
// **TODO** Vacuum delete-heavy pages.
// **TODO** Really for write-ahead, append the block list so you can read it
// quickly and deserialize it quickly. You can write each block list at a
// position in the log and write out a map to the position of the block list
// indexed by key. This takes advantage of scatter-read over sequential write
// that is now in vogue.
'use strict'
// Sort function generator.
const ascension = require('ascension')
// Comparator decorator that extracts the sorted fields from an object.
const whittle = require('whittle')
// Node.js API.
const assert = require('assert')
const path = require('path')
const fileSystem = require('fs')
const fs = require('fs').promises
// Return the first non null-like value.
const { coalesce } = require('extant')
// An `async`/`await` work queue.
const Turnstile = require('turnstile')
// Journaled file system operations for tree rebalancing.
const Journalist = require('journalist')
// A pausable service work queue that shares a common application work queue.
const Fracture = require('fracture')
// A non-cryptographic (fast) 32-bit hash for record integrity.
// const fnv = require('./fnv')
// Serialize a single b-tree record.
const Recorder = require('transcript/recorder')
// Incrementally read a b-tree page chunk by chunk.
const Player = require('transcript/player')
// Catch nested exceptions by type, message and properties.
const rescue = require('rescue')
// A Promise wrapper that captures `resolve` and `reject`.
const Future = require('perhaps')
// Binary search for a record in a b-tree page.
const find = require('./find')
// Partition a leaf page according to user's desired groupings.
const Partition = require('./partition')
// An `Error` type specific to Strata.
const Strata = { Error: require('./error') }
//
// Sheaf is the crux of Strata. It exists as a separate object possibly for
// legacy reasons, and it will stay that way because it makes `Strata` and
// `Cursor` something a user can read to understand the interface.
//
class Sheaf {
static options (options) {
const leaf = coalesce(options.leaf, {})
options.leaf = {
split: coalesce(leaf.split, 5),
merge: coalesce(leaf.merge, 1)
}
const branch = coalesce(options.branch, {})
options.branch = {
split: coalesce(branch.split, 5),
merge: coalesce(branch.merge, 1)
}
options.comparator = function () {
const zero = object => object
if (options.comparator == null) {
const comparator = whittle(ascension([ String ]), value => [ value ])
return { leaf: comparator, branch: comparator, zero }
} else if (typeof options.comparator == 'function') {
return { leaf: options.comparator, branch: options.comparator, zero }
} else {
return options.comparator
}
} ()
return options
}
// Sheaf accepts the destructible and user options passed to `new Strata`
constructor (destructible, options) {
Strata.Error.assert(options.turnstile != null, 'OPTION_REQUIRED', { _option: 'turnstile' })
this.options = Sheaf.options(options)
this.pages = options.pages
this.comparator = options.comparator
this.leaf = options.leaf
this.branch = options.branch
this.storage = options.storage
this.storage.deferrable.increment()
this._id = 0
this._root = this._create({ id: -1, leaf: false, items: [{ id: '0.0' }] }, [])
this._root.cartridge.heft = 1
this.destructible = destructible
this.deferrable = destructible.durable($ => $(), { countdown: 1 }, 'deferrable')
// **TODO** Do not worry about wrapping anymore.
// Operation id wraps at 32-bits, cursors should not be open that long.
this._operationId = 0xffffffff
// Concurrency and work queues. One keyed queue for page writes, the
// other queue will only use a single key for all housekeeping.
this._fracture = new Fracture(destructible.durable($ => $(), 'appender'), {
turnstile: options.turnstile,
value: key => {
switch (key) {
case 'keephouse':
return { candidates: [] }
default:
return {
id: this._operationId = (this._operationId + 1 & 0xffffffff) >>> 0,
writes: [],
cartridge: this.pages.hold(key),
future: new Future
}
}
},
worker: this._fractured.bind(this),
cancel: ({ key, value }) => {
if (key == 'append') {
value.cartridge.release()
}
}
})
this._fracture.deferrable.increment()
// **TODO** Not yet used, would `mkdir` any pages that need to be
// inspected for balance.
this._canceled = new Set
destructible.destruct(() => this.deferrable.decrement())
// This used to remove the root page but we have a race with reads which
// are not tracked with Destructible nor Fracture so we have no way to
// wait for reads to drain and we've always imagined that maybe reads
// could be considered entirely independent of writes, continuing after
// write failure, or maybe having a read-only tree, so we don't fight
// this one. In applications we can assert that the cache is zero after
// everything that could be reading has shut down.
this.deferrable.destruct(() => {
this.deferrable.ephemeral('shutdown', async () => {
// Trying to figure out how to wait for the Turnstile to drain.
// We can't terminate the housekeeping turnstile then the
// acceptor turnstile because they depend on each other, so
// we're going to loop. We wait for one to drain, then the
// other, then check to see if anything is in the queues to
// determine if we can leave the loop. Actually, we only need to
// check the size of the first queue in the loop, the second
// will be empty when `drain` returns.
//
// **TODO** Really want to just push keys into a file for
// inspection when we reopen for housekeeping.
//
                // **TODO** This is why we release cartridges in a finally block. We get a
// remove error here if we don't.
await this.drain()
this.track = true
this._fracture.deferrable.decrement()
this.storage.deferrable.decrement()
if (this._root != null) {
this._root.cartridge.release()
this._root = null
}
})
})
}
read (id) {
return this.storage.read(id)
}
//
// We load the page then check for a race after we've loaded. If a different
// strand beat us to it, we just ignore the result of our read and return
// the cached entry.
//
async load (id) {
const { page, heft } = await this.read(id)
const entry = this.pages.hold(id, null)
if (entry.value == null) {
entry.value = page
entry.heft = heft
}
return entry
}
_create (page, cartridges) {
const cartridge = this.pages.hold(page.id, page)
cartridges.push(cartridge)
return { page: cartridge.value, cartridge }
}
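    //
    // Synchronous descent toward `key`, tracking the pivot and the right
    // sibling key along the way; returns a miss with the page id when a page
    // is not in the cache so the caller can load it and try again.
    //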
_descend (entries, { key, level = -1, fork = false, rightward = false, approximate = false }) {
const descent = { miss: null, keyed: null, level: 0, index: 0, entry: null,
pivot: null,
cartridge: null,
page: null
}
let entry = null
entries.push(entry = this.pages.hold(-1))
for (;;) {
// When you go rightward at the outset or fork you might hit this
// twice, but it won't matter because you're not going to use the
// pivot anyway.
//
// You'll struggle to remember this, but it is true...
//
if (descent.index != 0) {
//
// The last key we visit is the key for the leaf page for whatever
// level we stop at. This holds true even if we fork. We hold onto
                // the previous pivot and if the left fork is not the zero index
// of the branch page, then the previous pivot is the key for the
// leaf of the fork. Note that for balancing, we only fork when we
// match the exact key in a branch. We have an approximate fork for
// the user in case we eliminate the leaf page with a merge, they
// will land in the merged page at the first index less than the
// key. The right key tracking will also be correct since we will
// immediately pick up a right key when we leave this block.
//
const pivot = descent.pivot
descent.pivot = {
key: entry.value.items[descent.index].key,
level: descent.level - 1
}
//
// If we're trying to find siblings we're using an exact key that is
// definitely above the level sought, we'll see it and then go left
// or right if there is a branch in that direction.
//
// Earlier I had this at KILLROY below. And I adjust the level, but
// I don't reference the level, so it's probably fine here.
//
if (this.comparator.branch(descent.pivot.key, key) == 0 && fork) {
descent.index--
rightward = true
descent.pivot = descent.index != 0
? { key: entry.value.items[descent.index].key, level: descent.level - 1 }
: pivot
}
}
// You don't fork right. You can track the rightward key though.
if (descent.index + 1 < entry.value.items.length) {
descent.right = entry.value.items[descent.index + 1].key
}
// We exit at the leaf, so this will always be a branch page.
const id = entry.value.items[descent.index].id
// Attempt to hold the page from the cache, return the id of the
// page if we have a cache miss.
entry = this.pages.hold(id)
if (entry == null) {
return { miss: id }
}
entries.push(entry)
// TODO Move this down below the leaf return and do not search if
// we are searching for a leaf.
// Binary search the page for the key, or just go right or left
// directly if there is no key.
const offset = entry.value.leaf ? 0 : 1
const index = rightward
? entry.value.leaf ? ~(entry.value.items.length - 1) : entry.value.items.length - 1
: key != null
? find(this.comparator.leaf, entry.value.items, key, offset)
: entry.value.leaf ? ~0 : 0
//
// If the page is a leaf, assert that we're looking for a leaf and
// return the leaf page.
//
if (entry.value.leaf) {
descent.found = index >= 0
descent.index = index < 0 ? ~index : index
assert.equal(level, -1, 'could not find branch')
break
}
//
// If the index is less than zero we didn't find the exact key, so
// we're looking at the bitwise not of the insertion point which is
// right after the branch we're supposed to descend, so back it up
// one.
//
descent.index = index < 0 ? ~index - 1 : index
// We're trying to reach branch and we've hit the level.
if (level == descent.level) {
break
}
// KILLROY was here.
descent.level++
}
//
// **TODO** What happens when we merge a leaf page so that the key is
// gone and then we delete all the values before the key? Essentially,
// what is the effect of searching for a key that is not a leaf key
// whose value is greater than the leaf key it lands on and less than
// the least value in the page? We can test this without branch races.
// If it is `-1` that's fine. You're not supposed to fork to find an
// insert location. I believe `-1` is a stop for reverse iteration.
// Write a test and come back and document this with more confidence.
//
if (fork && !rightward) {
if (approximate) {
descent.index--
descent.found = false
} else {
return null
}
}
return descent
}
//
// We hold onto the entries array for the descent to prevent the unlikely
// race condition where we cannot descend because we have to load a page,
// but while we're loading a page another page in the descent unloads.
//
// Conceivably, this could continue indefinitely.
//
async descend (query, callerEntries, internal = true) {
const entries = [[]]
for (;;) {
entries.push([])
const descent = this._descend(entries[1], query)
entries.shift().forEach(entry => entry.release())
if (descent == null) {
entries.shift().forEach((entry) => entry.release())
return null
}
if (descent.miss == null) {
callerEntries.push(descent.entry = entries[0].pop())
entries.shift().forEach(entry => entry.release())
descent.cartridge = descent.entry
descent.page = descent.cartridge.value
return descent
}
const entry = await this.load(descent.miss)
entries[0].push(entry)
}
}
//
// You keep trying to make the catch block a finally block but then notice
    // that the release of the entries is conditional on not missing. That is,
// if you miss you don't want to release the entries, but a finally block
// always releases the entries.
//
descend2 (trampoline, query, found) {
this.deferrable.operational()
const entries = []
try {
const descent = this._descend(entries, query)
if (descent.miss) {
trampoline.promised(async () => {
try {
entries.push(await this.deferrable.destructive('load', this.load(descent.miss)))
this.descend2(trampoline, query, found)
} finally {
entries.forEach(entry => entry.release())
}
})
} else {
if (descent != null) {
descent.entry = entries.pop()
}
entries.forEach(entry => entry.release())
entries.length = 0
found(descent)
}
} catch (error) {
entries.forEach(entry => entry.release())
throw error
}
}
// Writes appear to be able to run with impunity. What was the logic there?
// Something about the leaf being written to synchronously, but if it was
// asynchronous, then it is on the user to assert that the page has not
// changed.
//
// The block will wait on a promise release preventing any of the writes
// from writing.
//
// Keep in mind that there is only one housekeeper, so that might factor
// into the logic here.
//
// Can't see what's preventing writes from becoming stale. Do I ensure that
// they are written before the split? Must be.
//
async _append (canceled, key, { writes, cartridge, future }) {
await this.deferrable.copacetic($ => $(), 'append', null, async () => {
try {
this.deferrable.progress()
const page = cartridge.value
if (
(
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= this.leaf.merge
)
) {
this._fracture.enqueue('keephouse').value.candidates.push(page.key || page.items[0].key)
}
await this.storage.writeLeaf(page, writes)
} finally {
cartridge.release()
future.resolve()
}
})
}
append (id, buffer, writes) {
this.deferrable.operational()
const append = this._fracture.enqueue(id)
append.value.writes.push(buffer)
// **TODO** This is broken now for write-ahead since it is synchronous.
// You would have to wait on a flush of the write-ahead log.
// **TODO** So it would appear that the fractures should move somehow
// into filesystem.
if (writes[append.id] == null) {
writes[append.id] = append.completed
}
}
drain () {
return this._fracture.drain()
}
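    //
    // A leaf is unbalanced when it should split or merge; a branch when it
    // reaches the branch split size, falls to the branch merge size, or is a
    // root that should drain or fill.
    //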
_unbalanced (page) {
return page.leaf
? (
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= this.leaf.merge
)
: (
page.items.length >= this.branch.split
)
||
(
page.id == '0.0'
? +page.items[0].id.split('.')[1] % 2 == 0 && page.items.length == 1
: page.items.length <= this.branch.merge
)
}
_balanceIf (branch, messages, message) {
if (this._unbalanced(branch.page)) {
messages.push(message)
}
}
// Assume there is nothing to block or worry about with the branch pages.
// Can't recall at the moment, though. Descents are all synchronous.
//
// You've come back to this and it really bothers you that these slices are
// performed twice, once in the journalist and once in the commit. You
// probably want to let this go for now until you can see clearly how you
// might go about eliminating this duplication. Perhaps the commit uses the
// journalist to descend, lock, etc. just as the Cursor does. Or maybe the
// Journalist is just a Sheaf of pages, which does perform the leaf write,
// but defers to the Commit, now called a Journalist, to do the splits.
//
// It is not the case that the cached information is in some format that is
// not ready for serialization. What do we get exactly? What we'll see at
// first is that these two are calling each other a lot, so we're going to
// probably want to move more logic back over to Commit, including leaf
// splits. It will make us doubt that we could ever turn this easily into an
// R*Tree but the better the architecture, the easier it will be to extract
// components for reuse as modules, as opposed to making this into some sort
// of pluggable framework.
//
// Maybe it just came to me. Why am I logging `drain`, `fill`, etc? The
// commit should just expose `emplace` and the journalist can do the split
// and generate the pages and then the Commit is just journaled file system
// operations. It won't even update the heft, it will just return the new
// heft and maybe it doesn't do the page reads either.
//
// We'd only be duplicating the splices, really.
//
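    // Drain root: split an over-full root in half into two new branch children
    // and leave the root referencing only those two.
    //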
async _drainRoot (messages, cartridges) {
const root = await this.descend({ key: null, level: 0 }, cartridges)
const partition = Math.floor(root.entry.value.items.length / 2)
const left = this._create({
id: this.storage.nextId(false),
offset: 1,
items: root.page.items.slice(0, partition),
hash: null,
stop: 0
}, cartridges)
const right = this._create({
id: this.storage.nextId(false),
offset: 1,
items: root.page.items.slice(partition),
hash: null,
stop: 0
}, cartridges)
root.page.items = [{
id: left.page.id,
key: null,
heft: left.page.items[0].heft
}, {
id: right.page.id,
key: right.page.items[0].key,
heft: left.page.items[0].heft
}]
right.page.items[0].key = null
right.page.items[0].heft = left.page.items[0].heft
messages.forEach(message => message.level++)
this._balanceIf(left, messages, { method: 'balance', key: null, level: 1 })
this._balanceIf(right, messages, { method: 'balance', key: root.page.items[1].key, level: 1 })
await this.storage.writeDrainRoot({ left, right, root })
}
async balance (key, level, messages, cartridges) {
const branch = await this.descend({ key, level }, cartridges)
const leaves = +branch.page.items[0].id.split('.')[1] % 2 == 1
if (branch.page.items.length >= this.branch.split) {
if (branch.page.id == '0.0') {
await this._drainRoot(messages, cartridges)
} else {
await this._splitBranch(key, level, messages, cartridges)
}
} else if (branch.page.items.length <= this.branch.merge) {
if (branch.page.id != '0.0') {
const merger = await this._selectMerger(key, branch, cartridges)
await this._mergeBranch(merger, messages, cartridges)
} else if (! leaves && branch.page.items.length == 1) {
await this._fillRoot(messages, cartridges)
}
}
}
async _splitBranch (key, level, messages, cartridges) {
const left = await this.descend({ key, level }, cartridges)
const parent = await this.descend({ key, level: level - 1 }, cartridges)
const partition = Math.floor(left.page.items.length / 2)
const right = this._create({
id: this.storage.nextId(false),
items: left.page.items.splice(partition),
leaf: false,
stop: 0
}, cartridges)
const promotion = right.page.items[0].key
right.page.items[0].key = null
left.page.items = left.page.items.splice(0, partition)
parent.page.items.splice(parent.index + 1, 0, {
key: promotion,
id: right.page.id,
heft: parent.page.items[parent.page.items.length - 1].heft
})
this._balanceIf(left, messages, { method: 'balance', key: key, level: level })
this._balanceIf(right, messages, { method: 'balance', key: promotion, level: level })
this._balanceIf(parent, messages, { method: 'balance', key: key, level: level - 1 })
await this.storage.writeSplitBranch({ promotion, left, right, parent })
}
//
// **TODO** This is what we'll call a vacuum for the sake of removing delete
// messages.
//
async _rotate () {
}
//
// Split leaf. We always split a new page off to the right. Because we
// always merge two pages together into the left page our left-most page id
// will never change, it will always be `0.1`.
//
    // Split is performed by creating two new stub append logs. One for the
// existing page which is now the left page and one for the new right page.
// When either of these pages loads they will load the old existing page,
// then split the page and continue with new records added to the subsequent
// append log.
//
async _splitLeaf (pause, key, left, cartridges, displace) {
// Descend to the parent branch page.
const parent = await this.descend({ key, level: left.level - 1 }, cartridges)
// Create the right page now so we can lock it. We're going to
// synchronously add it to the tree and then do the housekeeping to
// persist the split asynchronously. While we're async, someone could
// descend the tree and start writing. In fact, this is very likely to
// happen during a batch insert by the user.
const right = this._create({
id: this.storage.nextId(true),
leaf: true,
items: [],
right: null,
dependents: {},
key: null,
log: null,
stop: 0
}, cartridges)
const messages = []
// Create our journaled tree alterations.
const pauses = []
try {
pauses.push(await pause(left.page.id))
pauses.push(await pause(right.page.id))
// Race is the wrong word, it's our synchronous time. We have to split
// the page and then write them out. Anyone writing to this leaf has to
            // be able to see the split so that they surrender their cursor if
// their insert or delete belongs in the new page, not the old one.
//
// Notice that all the page manipulation takes place before the first
// write. Recall that the page manipulation is done to the page in
            // memory which is official, the page writes are lagging.
// Split page creating a right page.
const length = left.page.items.length
const partition = Partition(this.comparator.branch, left.page.items)
// If we cannot partition because the leaf and branch have different
// partition comparators and the branch comparator considers all keys
// identical, we give up and return. We will have gone through the
// housekeeping queue to get here, and if the user keeps inserting keys
// that are identical according to the branch comparator, we'll keep
// making our futile attempts to split. Currently, though, we're only
// going to see this behavior in Amalgamate when someone is staging an
// update to the same key, say inserting it and deleting it over and
// over, and then if they are doing it as part of transaction, we'd only
// attempt once for each batch of writes. We could test the partition
// before the entry into the housekeeping queue but then we have a
// racing unit test to write to get this branch to execute, so I won't
// bother until someone actually complains. It would mean a stage with
// 100s of updates to one key that occur before the stage can merge
            // before we start to hit this early exit.
if (partition == null) {
cartridges.forEach(cartridge => cartridge.release())
right.cartridge.remove()
return
}
const items = left.page.items.splice(partition)
right.page.key = this.comparator.zero(items[0].key)
right.page.items = items
right.page.right = left.page.right
right.cartridge.heft = items.reduce((sum, item) => sum + item.heft, 1)
// Set the right key of the left page.
left.page.right = right.page.key
left.cartridge.heft -= right.cartridge.heft - 1
// Set the heft of the left page and entry. Moved this down.
// child.entry.heft -= heft - 1
// Insert a reference to the right page in the parent branch page.
// Use an approximate heft for writeahead only storage, recalculated
// for file system storage.
//
parent.page.items.splice(parent.index + 1, 0, {
key: right.page.key,
id: right.page.id,
heft: parent.page.items[parent.page.items.length - 1].heft
})
            // If either page is still larger than the split threshold, check
// the split again.
for (const page of [ left.page, right.page ]) {
if (
page.items.length >= this.leaf.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
) {
this._fracture.enqueue('keephouse').value.candidates.push(page.key || page.items[0].key)
}
}
//
            // Write any queued writes; they would have been in memory, in the page
// that was split above. We based our split on these writes.
//
const writes = []
for (const entries of pauses[0].entries) {
writes.push.apply(writes, entries.writes.splice(0))
}
writes.push.apply(writes)
//
this._balanceIf(parent, messages, { method: 'balance', key: key, level: parent.level })
            // Once we await, our synchronous operations are over. The user can
// append new writes to the existing queue entry.
//
// All user operations are synchronous, operating on a page after a
// synchronous descent with no async operations allowed while they
// hold the page. This means we do not have to worry about splitting
// a page out from under them.
//
// Thus, the first asynchronous action is a synchronous lock release
// of a sort, the user can now change the page in memory. We have
// still paused all writes to both the left and right pages and we
// are in a hurry to release that lock.
//
await this.storage.writeSplitLeaf({ key, left, right, parent, writes, messages })
//
} finally {
//
// **TODO** We probably don't want to release our locks, it just
// means that work proceeds in some fashion that causes problems,
// and how will our appender strand know that this strand is in a
// bad way? Can we have an errored flag on the destructible?
// We can resume writing. Everything else is going to happen to log
            // files that are not write contended.
//
pauses.forEach(pause => pause.resume())
//
}
//
// We run this function to continue balancing the tree.
//
await this.storage.balance(this, displace)
}
    // **TODO** Something is wrong here. We're using `child.right` to find the
    // right branch page but the leaf and its right sibling can always be
// under the same branch. How do we really go right?
//
// **TODO** The above is a major problem. This is super broken. We may end
// up merging a page into nothing.
//
// **TODO** Regarding the above. Stop and think about it and you can see
// that you can always pick up the right key of the page at a particular
// level as you descend the tree. On the way down, update a right variable
// with the id of the page for the node to the right of the node you
// followed if one exists. If the page you followed is at the end of the
// array do not update it. Wait... Is that what `child.right` is here? Heh.
    // It might well be. I see I am tracking right as I descend.
//
// **TODO** LOL at all that above and if you're smarter when you wrote the
// code than when you wrote these comments, rewrite all this into a
// description so you don't do this again.
//
async _selectMerger (key, child, entries) {
const level = child.entry.value.leaf ? -1 : child.level
const left = await this.descend({ key, level, fork: true }, entries)
const right = child.right == null
? null
: await this.descend({ key: child.right, level }, entries)
const mergers = []
if (left != null) {
mergers.push({
items: left.entry.value.items,
key: child.entry.value.key || child.entry.value.items[0].key,
level: level
})
}
if (right != null) {
mergers.push({
items: right.entry.value.items,
count: right.entry.value.items.length,
key: child.right,
level: level
})
}
return mergers
.filter(merger => this.comparator.branch(merger.items[0].key, merger.items[merger.items.length - 1].key) != 0)
.sort((left, right) => left.items.length - right.items.length)
.shift()
}
_isDirty (page, sizes) {
return (
page.items.length >= sizes.split &&
this.comparator.branch(page.items[0].key, page.items[page.items.length - 1].key) != 0
)
||
(
! (page.id == '0.1' && page.right == null) &&
page.items.length <= sizes.merge
)
}
async _surgery (right, pivot) {
const surgery = {
deletions: [],
replacement: null,
splice: pivot
}
// If the pivot is somewhere above we need to promote a key, unless all
// the branches happen to be single entry branches.
if (right.level - 1 != pivot.level) {
let level = right.level - 1
do {
const ancestor = this.descend({ key, level }, entries)
if (ancestor.page.items.length == 1) {
surgery.deletions.push(ancestor)
} else {
// **TODO** Also null out after splice.
assert.equal(ancestor.index, 0, 'unexpected ancestor')
surgery.replacement = ancestor.page.items[1].key
surgery.splice = ancestor
}
level--
} while (surgery.replacement == null && level != right.pivot.level)
}
return surgery
}
// **TODO** The way that I'm journaling these balances, I need to ensure
// that I am not journaling a page that will be deleted. Something like
// right then left then parent, because if we go left to right the left page
// may choose to merge with its right sibling deleting it. If the right page
    // chooses to merge with the left sibling it will delete itself. No, no.
// We're going by keys, so we're not going to load a deleted page. But, the
    // descent logic depends on navigating by the least key in the branch page,
// so we need to be sure to check that we hit the correct key.
// Easiest way to keep from having a bunch of tests we have to hit..
// We check as to whether or not to add the merge, so we're not building up
// a great big list, just... If we are going to try to merge this page
// again, we will check the parent after we merge again. We have to move
// merge selection into branch merge so that if we can't merge, we still
// check the parent. For split we can always check the parent and then the
// left and right we are only ever adding pages. What about the case where
// split and then possibly merge? We should see if we shouldn't spam the
    // balance queue and then see if we don't luck out and hit the cancel
// condition.
// Fill root will delete a child. Plus, we have an ever growing list of
    // possible balance operations so we have to think about what is already in
// the list.
//
async _fillRoot (messages, cartridges) {
const root = await this.descend({ key: null, level: 0 }, cartridges)
const child = await this.descend({ key: null, level: 1 }, cartridges)
root.page.items = child.page.items
messages.forEach(message => message.level--)
await this.storage.writeFillRoot({ root, child, messages })
}
async _mergeBranch ({ key, level }, messages, cartridges) {
// **TODO** We don't have to worry. If we go right first, it will have a
// pivot and if so it has a left, if not it has no left. EXCEPT we just
// got this from merger selection so we know it is good, what is going
// on in merger selection?
const left = await this.descend({ key, level, fork: true }, cartridges)
const right = await this.descend({ key, level }, cartridges)
const pivot = await this.descend(right.pivot, cartridges)
const surgery = await this._surgery(right, pivot)
right.page.items[0].key = key
left.page.items.push.apply(left.page.items, right.page.items)
// Replace the key of the pivot if necessary.
if (surgery.replacement != null) {
pivot.page.items[pivot.index].key = surgery.replacement
}
// Remove the branch page that references the leaf page.
surgery.splice.page.items.splice(surgery.splice.index, 1)
// If the splice index was zero, null the key of the new left most branch.
if (surgery.splice.index == 0) {
surgery.splice.page.items[0].key = null
}
//
// **TODO** This needs to be tested. With some confidence in the pivot
// logic I'm going to use the pivot of the left and the splice to find
        // them. The key for a branch page that is not the right-most path is
// always going to be the pivot.
//
if (left.pivot == null) {
this._balanceIf(left, messages, { method: 'balance', key: null, level: level })
} else {
this._balanceIf(left, messages, { method: 'balance', key: left.pivot.key, level: level })
}
if (surgery.splice.pivot == null) {
this._balanceIf(surgery.splice, messages, { method: 'balance', key: null, level: surgery.splice.level })
} else {
this._balanceIf(surgery.splice, messages, { method: 'balance', key: surgery.splice.pivot.key, level: surgery.splice.level })
}
await this.storage.writeMergeBranch({ key, left, right, pivot, surgery })
}
//
    // The thing is this. Whenever I fiddle around seriously with this code, I'll
// introduce a bug, I mean, just while editing I'll hit "x" in `vim` and
// delete a character, and when I run the test I'll get all kinds of evil.
// What I'm finding now is that there will be infinite loops when I release
// the pause in the finally block, but the pause enqueues a new entry when
// you resume it and then the appender sees that the page needs to merge so
// we come back here. This is only for a programmer error while editing.
// In practice, though, if there is a failure to write the journal, how do
// we proceed? Really leaning heavy on leaving the queue paused. The user
// will know the writes didn't finish, ah, no they won't.
// Might release the cartridges, but generally feel like we should leave
// the...
// Okay, here is where we could start to use the shutdown behavior. We might
// have a directory and anything that is dirty, we mkdir the name of the
// dirty page, so we continue to flush, but we stop balancing. Let's do
// this.
//
async _mergeLeaf (pause, { key, level }, cartridges, displace) {
const left = await this.descend({ key, level, fork: true }, cartridges)
const right = await this.descend({ key, level }, cartridges)
const pivot = await this.descend(right.pivot, cartridges)
const surgery = await this._surgery(right, pivot)
const messages = [{ method: 'balance', key: key, level: surgery.splice.level }]
const pauses = []
try {
pauses.push(await pause(left.page.id))
pauses.push(await pause(right.page.id))
// Add the items in the right page to the end of the left page.
const items = left.page.items
const merged = right.page.items.splice(0)
items.push.apply(items, merged)
// Set right reference of left page.
left.page.right = right.page.right
// Adjust heft of left entry.
left.cartridge.heft += right.cartridge.heft - 1
// TODO Remove after a while, used only for assertion in `Cache`.
right.cartridge.heft -= merged.reduce((sum, item) => sum + item.heft, 0)
// Mark the right page deleted, it will cause `indexOf` in the `Cursor`
// to return `null` indicating that the user must release the `Cursor`
// and descend again.
// **TODO** No longer necessary, right?
right.page.deleted = true
// See if the merged page needs to split or merge further.
if (this._isDirty(left.page, this.leaf)) {
                this._fracture.enqueue('keephouse').value.candidates.push(left.entry.value.items[0].key)
}
// Replace the key of the pivot if necessary.
if (surgery.replacement != null) {
pivot.page.items[pivot.index].key = surgery.replacement
}
// Remove the branch page that references the leaf page.
surgery.splice.page.items.splice(surgery.splice.index, 1)
if (surgery.splice.index == 0) {
surgery.splice.page.items[0].key = null
}
//
// Because user updates are synchronous from descent when we go
// async any user writes will go to our new merged page. We do need
// to write the existing writes before we perform our merge.
//
const writes = { left: [], right: [] }
for (const entry of pauses[0].entries) {
writes.left.push.apply(writes.left, entry.writes.splice(0))
}
for (const entry of pauses[1].entries) {
writes.right.push.apply(writes.right, entry.writes.splice(0))
}
//
await this.storage.writeMergeLeaf({ left, right, surgery, pivot, writes, messages })
//
} finally {
pauses.forEach(pause => pause.resume())
}
await this.storage.balance(this, displace)
}
//
// **TODO** `copacetic` could go like this...
// We do a flat iteration of the tree from `0.1` following the right page.
// We first go through every directory and ensure that there is no directory
// named `seen` deleting it if it exists.
// Then we iterate and mark as `seen` every directory we visit.
// When we visit we assert that the page is correctly sorted. We then return
// the items to the user so the user can examine the entries.
// We then look for any directories that are unseen and assert that they are
// `merged` files.
// We can then iterate through the pages again and vacuum all pages that
    // need to be vacuumed. We now assert that the pages are vacuumed
// correctly, nowhere does a page reference a page outside of its own
// directory.
// Then we can look at the unseen pages and see if any of them reference any
// of the `merged` files. If not we can delete the merged files.
//
async _keephouse (pause, canceled, { candidates }, displace) {
await this.deferrable.copacetic($ => $(), 'append', null, async () => {
this.deferrable.progress()
if (canceled) {
candidates.forEach(candidate => this._canceled.add(candidate))
} else {
for (const key of candidates) {
const cartridges = []
try {
const child = await this.descend({ key }, cartridges)
if (child.entry.value.items.length >= this.leaf.split) {
await this._splitLeaf(pause, key, child, cartridges, displace)
} else if (
! (
child.entry.value.id == '0.1' && child.entry.value.right == null
) &&
child.entry.value.items.length <= this.leaf.merge
) {
const merger = await this._selectMerger(key, child, cartridges)
if (merger != null) {
await this._mergeLeaf(pause, merger, cartridges, displace)
}
}
} finally {
cartridges.forEach(cartridge => cartridge.release())
}
}
}
})
}
_fractured ({ pause, canceled, key, value, displace }) {
switch (key) {
case 'keephouse':
return this._keephouse(pause, canceled, value, displace)
default:
return this._append(canceled, key, value)
}
}
}
module.exports = Sheaf
| Maintain write future set.
Was treating future set like old map of futures.
| sheaf.js | Maintain write future set. | <ide><path>heaf.js
<ide> this.deferrable.operational()
<ide> const append = this._fracture.enqueue(id)
<ide> append.value.writes.push(buffer)
<del> // **TODO** This is broken now for write-ahead since it is synchronous.
<del> // You would have to wait on a flush of the write-ahead log.
<del> // **TODO** So it would appear that the fractures should move somehow
<del> // into filesystem.
<del> if (writes[append.id] == null) {
<del> writes[append.id] = append.completed
<del> }
<add> writes.add(append.future)
<ide> }
<ide>
<ide> drain () { |
|
JavaScript | mit | a604c52af1f3bb200308c1698bd5f78043372e81 | 0 | D-PLACE/dplace,shh-dlce/dplace,NESCent/dplace,D-PLACE/dplace,D-PLACE/dplace,stefelisabeth/dplace,stefelisabeth/dplace,NESCent/dplace,shh-dlce/dplace,shh-dlce/dplace,stefelisabeth/dplace,shh-dlce/dplace,NESCent/dplace,NESCent/dplace,stefelisabeth/dplace,D-PLACE/dplace | angular.module('dplaceMapDirective', [])
.directive('dplaceMap', function(colorMapService) {
function link(scope, element, attrs) {
element.append("<div id='mapdiv' style='width:1140px; height:30rem;'></div>");
scope.localRegions = [];
scope.checkDirty = function() {
return !(angular.equals(scope.localRegions, scope.selectedRegions));
};
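            // Guard flag: suppresses the region-selection callback while we
            // push model-driven selection changes into the map ourselves.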
scope.updatesEnabled = true;
scope.map = $('#mapdiv').vectorMap({
map: 'tdwg-level2',
backgroundColor: 'white',
series: {
markers: [{
attribute: 'fill'
}]
},
regionStyle: {
initial: {
fill: '#428bca',
"fill-opacity": 1,
stroke: '#357ebd',
"stroke-width": 0,
"stroke-opacity": 1
},
hover: {
"fill-opacity": 0.8
},
selected: {
fill: '#113'
},
selectedHover: {
}
},
onRegionOver: function(e, code) {
if(attrs.region) {
scope.$apply(function () {
scope.region = code;
});
}
},
onRegionSelected: function(e, code, isSelected, selectedRegionCodes) {
if(scope.updatesEnabled && attrs.selectedRegions) {
scope.localRegions = selectedRegionCodes.map(function(code) {
return {
code: code,
name: scope.map.getRegionName(code)
};
});
var dirty = scope.checkDirty();
if(dirty) {
scope.$apply(function() {
scope.selectedRegions = angular.copy(scope.localRegions);
});
}
}
},
regionsSelectable: true
}).vectorMap('get','mapObject');
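            // Rebuild the society markers from the bound societies and color
            // them using the shared color map service.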
scope.addMarkers = function() {
scope.map.removeAllMarkers();
if(!scope.societies) {
return;
}
// get the society IDs
var societyIds = scope.societies.map(function(societyResult) {
return societyResult.society.id;
});
scope.societies.forEach(function(societyResult) {
var society = societyResult.society;
// Add a marker for each point
                    var marker = {latLng: society.location.coordinates.reverse(), name: society.name};
scope.map.addMarker(society.id, marker);
});
// Map IDs to colors
var colorMap = colorMapService.generateColorMap(societyIds);
scope.map.series.markers[0].setValues(colorMap);
};
if(attrs.societies) {
// Update markers when societies change
scope.$watchCollection('societies', function(oldvalue, newvalue) {
scope.addMarkers();
});
}
if(attrs.selectedRegions) {
scope.$watchCollection('selectedRegions', function(oldvalue, newvalue) {
var dirty = scope.checkDirty();
if(dirty) {
// update the local variable first
scope.localRegions = angular.copy(scope.selectedRegions);
// then update the UI
scope.updatesEnabled = false;
var regionCodes = scope.localRegions.map(function(region){
return region.code;
});
scope.map.clearSelectedRegions();
scope.map.setSelectedRegions(regionCodes);
scope.updatesEnabled = true;
}
});
}
scope.$on('mapTabActivated', function(event, args) {
scope.map.updateSize();
});
scope.$on('$destroy', function() {
scope.map.remove();
});
}
return {
restrict: 'E',
scope: {
societies: '=',
region: '=',
selectedRegions: '='
},
link: link
};
}); | dplace_app/static/js/directives.js | angular.module('dplaceMapDirective', [])
.directive('dplaceMap', function(colorMapService) {
function link(scope, element, attrs) {
element.append("<div id='mapdiv' style='width:1140px; height:30rem;'></div>");
scope.localRegions = [];
scope.checkDirty = function() {
return !(angular.equals(scope.localRegions, scope.selectedRegions));
};
scope.updatesEnabled = true;
scope.map = $('#mapdiv').vectorMap({
map: 'tdwg-level2',
backgroundColor: 'white',
series: {
markers: [{
attribute: 'fill'
}]
},
regionStyle: {
initial: {
fill: '#428bca',
"fill-opacity": 1,
stroke: '#357ebd',
"stroke-width": 0,
"stroke-opacity": 1
},
hover: {
"fill-opacity": 0.8
},
selected: {
fill: '#113'
},
selectedHover: {
}
},
onRegionOver: function(e, code) {
if(attrs.region) {
scope.$apply(function () {
scope.region = code;
});
}
},
onRegionSelected: function(e, code, isSelected, selectedRegionCodes) {
if(scope.updatesEnabled && attrs.selectedRegions) {
scope.localRegions = selectedRegionCodes.map(function(code) {
return {
code: code,
name: scope.map.getRegionName(code)
};
});
var dirty = scope.checkDirty();
if(dirty) {
scope.$apply(function() {
scope.selectedRegions = angular.copy(scope.localRegions);
});
}
}
},
regionsSelectable: true
}).vectorMap('get','mapObject');
scope.addMarkers = function() {
scope.map.removeAllMarkers();
if(!scope.societies) {
return;
}
// get the society IDs
var societyIds = scope.societies.map(function(societyResult) {
return societyResult.society.id;
});
scope.societies.forEach(function(societyResult) {
var society = societyResult.society;
// Add a marker for each point
                    var marker = {latLng: society.location.coordinates.reverse(), name: society.name};
scope.map.addMarker(society.id, marker);
});
// Map IDs to colors
var colorMap = colorMapService.generateColorMap(societyIds);
scope.map.series.markers[0].setValues(colorMap);
};
if(attrs.societies) {
// Update markers when societies change
scope.$watchCollection('societies', function(oldvalue, newvalue) {
scope.addMarkers();
});
}
if(attrs.selectedRegions) {
scope.$watchCollection('selectedRegions', function(oldvalue, newvalue) {
var dirty = scope.checkDirty();
if(dirty) {
// update the local variable first
scope.localRegions = angular.copy(scope.selectedRegions);
// then update the UI
scope.updatesEnabled = false;
var regionCodes = scope.localRegions.map(function(region){
return region.code;
});
scope.map.clearSelectedRegions();
scope.map.setSelectedRegions(regionCodes);
scope.updatesEnabled = true;
}
});
}
scope.$on('mapTabActivated', function(event, args) {
scope.map.setSize();
});
scope.$on('$destroy', function() {
scope.map.remove();
});
}
return {
restrict: 'E',
scope: {
societies: '=',
region: '=',
selectedRegions: '='
},
link: link
};
}); | Fix map resize call
Changed in 2.0 from setSize to updateSize
| dplace_app/static/js/directives.js | Fix map resize call | <ide><path>place_app/static/js/directives.js
<ide> });
<ide> }
<ide> scope.$on('mapTabActivated', function(event, args) {
<del> scope.map.setSize();
<add> scope.map.updateSize();
<ide> });
<ide>
<ide> scope.$on('$destroy', function() { |
|
Java | mit | error: pathspec 'LeetCodeSolutions/java/src/76_Minimum_Window_Substring/Solution.java' did not match any file(s) known to git
| a0b5e619694d5acfb8e72a165543e1deb87a0fe0 | 1 | ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground | import java.util.HashMap;
import java.util.Map;
class Solution {
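    // Sliding window: extend `right` until the window covers every character
    // of t (with multiplicity), then shrink `left` to record the smallest
    // covering window seen so far.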
public String minWindow(String s, String t) {
if(s == null || s.length() < t.length() || s.length() == 0){
return "";
}
Map<Character, Integer> map = new HashMap<>();
for (char c : t.toCharArray()) {
map.put(c, map.getOrDefault(c, 0) + 1);
}
int left = 0, minLeft = 0;
int minLen = s.length() + 1;
int count = 0;
for (int right = 0; right < s.length(); right++) {
if (map.containsKey(s.charAt(right))) {
map.put(s.charAt(right), map.get(s.charAt(right)) - 1);
if (map.get(s.charAt(right)) >= 0) {
count++;
}
while (count == t.length()) {
if(right - left + 1 < minLen){
minLeft = left;
minLen = right - left + 1;
}
if(map.containsKey(s.charAt(left))){
map.put(s.charAt(left), map.get(s.charAt(left)) + 1);
if(map.get(s.charAt(left)) > 0){
count--;
}
}
left++;
}
}
}
if(minLen > s.length()) {
return "";
}
return s.substring(minLeft, minLeft + minLen);
}
}
| LeetCodeSolutions/java/src/76_Minimum_Window_Substring/Solution.java | 76. Minimum Window Substring
| LeetCodeSolutions/java/src/76_Minimum_Window_Substring/Solution.java | 76. Minimum Window Substring | <ide><path>eetCodeSolutions/java/src/76_Minimum_Window_Substring/Solution.java
<add>import java.util.HashMap;
<add>import java.util.Map;
<add>
<add>class Solution {
<add> public String minWindow(String s, String t) {
<add> if(s == null || s.length() < t.length() || s.length() == 0){
<add> return "";
<add> }
<add> Map<Character, Integer> map = new HashMap<>();
<add> for (char c : t.toCharArray()) {
<add> map.put(c, map.getOrDefault(c, 0) + 1);
<add> }
<add>
<add> int left = 0, minLeft = 0;
<add> int minLen = s.length() + 1;
<add> int count = 0;
<add> for (int right = 0; right < s.length(); right++) {
<add> if (map.containsKey(s.charAt(right))) {
<add> map.put(s.charAt(right), map.get(s.charAt(right)) - 1);
<add> if (map.get(s.charAt(right)) >= 0) {
<add> count++;
<add> }
<add>
<add> while (count == t.length()) {
<add> if(right - left + 1 < minLen){
<add> minLeft = left;
<add> minLen = right - left + 1;
<add> }
<add> if(map.containsKey(s.charAt(left))){
<add> map.put(s.charAt(left), map.get(s.charAt(left)) + 1);
<add> if(map.get(s.charAt(left)) > 0){
<add> count--;
<add> }
<add> }
<add> left++;
<add> }
<add> }
<add> }
<add>
<add> if(minLen > s.length()) {
<add> return "";
<add> }
<add>
<add> return s.substring(minLeft, minLeft + minLen);
<add> }
<add>} |
|
Java | apache-2.0 | be21ede1e31b77c8377d405256e08a9f3ae5100e | 0 | lopl233/BoarTrainer-master2 |
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Scanner;
import java.io.*;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
class Klient_reciver {
public static boolean isLogged=false;
private SSLConnector sslConnector;
private PrintWriter pw;
private BufferedReader br;
Klient_reciver() throws IOException {
String currentDir = System.getProperty("user.dir")+"/testkeysore.p12";
System.setProperty("javax.net.ssl.keyStore",currentDir);
System.setProperty("javax.net.ssl.keyStorePassword","dzikidzik");
System.setProperty("javax.net.ssl.keyStoreType","PKCS12");
System.setProperty("javax.net.ssl.trustStore",currentDir);
System.setProperty("javax.net.ssl.trustStorePassword","dzikidzik");
System.setProperty("javax.net.ssl.trustStoreType","PKCS12");
sslConnector = SSLConnector.getInstance();
try {
sslConnector.sslsocket.startHandshake();
} catch (IOException e) {}
pw = new PrintWriter(sslConnector.sslsocket.getOutputStream());
br = new BufferedReader(new InputStreamReader(sslConnector.sslsocket.getInputStream()));
}
public JSONObject reciver(Map map) throws IOException, JSONException {
JSONObject message = new JSONObject(map);
System.out.println("----Sending to server----");
System.out.println(JsonToString(message));
pw.write(message.toString());
pw.write("\n");
pw.flush();
String serverAnswer = br.readLine();
JSONObject answer = new JSONObject(serverAnswer);
System.out.println("----Answer from server");
System.out.println(JsonToString(answer));
return answer;
}
public JSONObject logIn(String Login, String Password, int device_id) throws JSONException, IOException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "LoginRequest");
data.put("login", Login);
data.put("password", Password);
data.put("device_id",Integer.toString(device_id));
return reciver(data);
}//koniec funkcji logowania
public JSONObject GetData() throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "GetBasicData");
return reciver(data);
}
public JSONObject AddDevice(String login, String password, int device_id,int kod) throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "AddDevice");
data.put("login", login);
data.put("password", password);
data.put("device_id", Integer.toString(device_id));
data.put("verify_code", Integer.toString(kod));
return reciver(data);
}
public JSONObject Register(String login, String password, String name, String lastname, String email, int phone, String verify_way) throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "RegisterNewClient");
data.put("login", login);
data.put("password", password);
data.put("name", name);
data.put("lastname", lastname);
data.put("PHONE",Integer.toString(phone));
data.put("EMAIL",email);
data.put("verify_way",verify_way);
return reciver(data);
}
public String JsonToString(JSONObject jsonObject) throws JSONException {
String result = "";
Iterator<?> keys = jsonObject.keys();
while( keys.hasNext() ) {
String key = (String)keys.next();
result = result + key + ":"+ jsonObject.getString(key) + "\n";
}
return result;
}
}//koniec klasy klient
class main{
public static Klient_reciver klient_reciver;
public static void main(String[] args) throws IOException, JSONException {
klient_reciver = new Klient_reciver();
Scanner scanner = new Scanner(System.in);
String choice = "";
while(true){
System.out.println("1. Login");
System.out.println("2. Get user data");
System.out.println("3. Add device");
System.out.println("4. Login");
System.out.println("5. Login");
choice = scanner.next();
switch (choice) {
case "1":
System.out.println("Login : ");
String login = scanner.next();
System.out.println("Haslo : ");
String password = scanner.next();
System.out.println("ID urzadzenia : ");
int device_id = scanner.nextInt();
klient_reciver.logIn(login, password, device_id);
break;
case "2":
break;
case "3":
break;
case "4":
break;
case "5":
break;
}
}
}
} | klient/src/klient.java |
import org.json.JSONException;
import org.json.JSONObject;
import java.io.*;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
class Klient_reciver {
public static boolean isLogged=false;
private SSLConnector sslConnector;
private PrintWriter pw;
private BufferedReader br;
Klient_reciver(){
String currentDir = System.getProperty("user.dir")+"/testkeysore.p12";
System.setProperty("javax.net.ssl.keyStore",currentDir);
System.setProperty("javax.net.ssl.keyStorePassword","dzikidzik");
System.setProperty("javax.net.ssl.keyStoreType","PKCS12");
System.setProperty("javax.net.ssl.trustStore",currentDir);
System.setProperty("javax.net.ssl.trustStorePassword","dzikidzik");
System.setProperty("javax.net.ssl.trustStoreType","PKCS12");
sslConnector = SSLConnector.getInstance();
try {
sslConnector.sslsocket.startHandshake();
} catch (IOException e) {}
}
public JSONObject reciver(Map map) throws IOException, JSONException {
JSONObject message = new JSONObject(map);
pw.write(message.toString());
pw.write("\n");
pw.flush();
String serverAnswer = br.readLine();
return new JSONObject(serverAnswer);
}
public JSONObject logIn(String Login, String Password, int device_id) throws JSONException, IOException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "LoginRequest");
data.put("login", Login);
data.put("password", Password);
data.put("device_id",Integer.toString(device_id));
return reciver(data);
}//koniec funkcji logowania
public JSONObject GetData() throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "GetBasicData");
return reciver(data);
}
public JSONObject AddDevice(String login, String password, int device_id,int kod) throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "AddDevice");
data.put("login", login);
data.put("password", password);
data.put("device_id", Integer.toString(device_id));
data.put("verify_code", Integer.toString(kod));
return reciver(data);
}
public JSONObject Register(String login, String password, String name, String lastname, String email, int phone, String verify_way) throws IOException, JSONException {
Map<String, String> data = new LinkedHashMap<>();
data.put("message_type", "RegisterNewClient");
data.put("login", login);
data.put("password", password);
data.put("name", name);
data.put("lastname", lastname);
data.put("PHONE",Integer.toString(phone));
data.put("EMAIL",email);
data.put("verify_way",verify_way);
return reciver(data);
}
public String JsonToString(JSONObject jsonObject) throws JSONException {
String result = "";
Iterator<?> keys = jsonObject.keys();
while( keys.hasNext() ) {
String key = (String)keys.next();
result = result + key + ":"+ jsonObject.getString(key) + "\n";
}
return result;
}
}//koniec klasy klient
class main{
public static Klient_reciver klient_reciver;
public static void main(String[] args) {
klient_reciver = new Klient_reciver();
while(true){
}
}
} | klient wersja pół-gotowa
 | klient/src/klient.java | klient wersja pół-gotowa | <ide><path>klient/src/klient.java
<ide>
<ide> import org.json.JSONException;
<ide> import org.json.JSONObject;
<del>
<add>import java.util.Scanner;
<ide> import java.io.*;
<ide> import java.util.Iterator;
<ide> import java.util.LinkedHashMap;
<ide> private PrintWriter pw;
<ide> private BufferedReader br;
<ide>
<del> Klient_reciver(){
<add> Klient_reciver() throws IOException {
<ide>
<ide> String currentDir = System.getProperty("user.dir")+"/testkeysore.p12";
<ide> System.setProperty("javax.net.ssl.keyStore",currentDir);
<ide> try {
<ide> sslConnector.sslsocket.startHandshake();
<ide> } catch (IOException e) {}
<add> pw = new PrintWriter(sslConnector.sslsocket.getOutputStream());
<add> br = new BufferedReader(new InputStreamReader(sslConnector.sslsocket.getInputStream()));
<ide>
<ide> }
<ide>
<ide> public JSONObject reciver(Map map) throws IOException, JSONException {
<ide> JSONObject message = new JSONObject(map);
<ide>
<add> System.out.println("----Sending to server----");
<add> System.out.println(JsonToString(message));
<ide> pw.write(message.toString());
<ide> pw.write("\n");
<ide> pw.flush();
<ide>
<ide> String serverAnswer = br.readLine();
<add> JSONObject answer = new JSONObject(serverAnswer);
<ide>
<del> return new JSONObject(serverAnswer);
<add> System.out.println("----Answer from server");
<add> System.out.println(JsonToString(answer));
<add>
<add> return answer;
<ide> }
<ide>
<ide> public JSONObject logIn(String Login, String Password, int device_id) throws JSONException, IOException {
<ide> class main{
<ide> public static Klient_reciver klient_reciver;
<ide>
<del> public static void main(String[] args) {
<add> public static void main(String[] args) throws IOException, JSONException {
<ide> klient_reciver = new Klient_reciver();
<add> Scanner scanner = new Scanner(System.in);
<add> String choice = "";
<ide> while(true){
<add> System.out.println("1. Login");
<add> System.out.println("2. Get user data");
<add> System.out.println("3. Add device");
<add> System.out.println("4. Login");
<add> System.out.println("5. Login");
<ide>
<ide>
<add> choice = scanner.next();
<add> switch (choice) {
<add> case "1":
<add> System.out.println("Login : ");
<add> String login = scanner.next();
<add> System.out.println("Haslo : ");
<add> String password = scanner.next();
<add> System.out.println("ID urzadzenia : ");
<add> int device_id = scanner.nextInt();
<add> klient_reciver.logIn(login, password, device_id);
<add> break;
<add> case "2":
<add> break;
<add> case "3":
<add> break;
<add> case "4":
<add> break;
<add> case "5":
<add> break;
<ide>
<ide>
<add> }
<ide>
<ide>
<ide> } |
|
JavaScript | agpl-3.0 | 0910146d4ee311749867bb1fe3ffe64d78887b80 | 0 | flamingo-geocms/3rd-party-components | /*
* Copyright (C) 2012-2013 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Ro Tercera component
* @author <a href="mailto:[email protected]">Roy Braam</a>
*/
Ext.define ("viewer.components.RoTercera",{
extend: "viewer.components.Component",
panel: null,
minWidth: 280,
minHeight: 540,
comboWidth: 200,
//stores
ownerStore: null,
typeStore: null,
statusStore: null,
//combo boxes
ownerCombo: null,
typeCombo: null,
statusCombo: null,
docContainer: null,
planContainer: null,
legendaButton: null,
drawCommentButton: null,
showAllCommentButton: null,
selectedPlanContainer: null,
currentPlans:null,
selectedPlan:null,
wmsLayer: null,
roToc: null,
roComment: null,
publicCommentfilter: null,
planCommentFilter: null,
commentAppLayer: null,
commentMapLayer: null,
commentLayerIndex: null,
config:{
name: "Ro-Tercera client",
title: "",
titlebarIcon : "",
tooltip : "",
label: "",
//TODO: make configurable
roServiceUrl: "",
terceraRequestPage: "https://tercera.provincie-utrecht.nl/RequestPage.aspx",
roonlineLayers: null,
roonlineServiceUrl: null,
layers: null
},
/**
* @constructor
* creating a Ro tercera component
*/
constructor: function (conf){
conf=this.setDefaults(conf);
var resourceUrl = "";
if (actionBeans && actionBeans["componentresource"]){
resourceUrl=actionBeans["componentresource"];
}
resourceUrl=Ext.String.urlAppend(resourceUrl,"className=viewer.components.RoTercera")
conf.iconUrl=Ext.String.urlAppend(resourceUrl,"resource=resources/images/icon16_gray.png");;
viewer.components.RoTercera.superclass.constructor.call(this, conf);
this.initConfig(conf);
var me = this;
if(this.hasButton == null || this.hasButton){
this.renderButton({
handler: function(){
me.buttonClick();
},
text: me.title,
icon: Ext.String.urlAppend(resourceUrl,"resource=resources/images/icon38_gray.png"),
tooltip: me.tooltip,
label: me.label
});
}
//this.test();
this.roToc = Ext.create("viewer.components.rotercera.RoToc",{});
this.roComment = Ext.create("viewer.components.rotercera.RoComment",conf,this);
this.roAllComment = Ext.create("viewer.components.rotercera.RoAllComment",conf,this);
return this;
},
setDefaults: function(conf){
//set minWidth:
if(conf.details.width < this.minWidth || !Ext.isDefined(conf.details.width)) conf.details.width = this.minWidth;
//set minHeight:
if(conf.details.height < this.minHeight || !Ext.isDefined(conf.details.height)) conf.details.height = this.minHeight;
if (Ext.isEmpty(conf.roonlineServiceUrl)){
conf.roonlineServiceUrl="http://afnemers.ruimtelijkeplannen.nl/afnemers/services";
}if (Ext.isEmpty(conf.roonlineLayers)){
conf.roonlineLayers="BP:Bestemmingsplangebied,BP:Wijzigingsplangebied,BP:Enkelbestemming,\
BP:Figuur,BP:Lettertekenaanduiding,BP:Maatvoering,BP:Dubbelbestemming,BP:Bouwvlak,\
BP:Gebiedsaanduiding,BP:Inpassingsplangebied,BP:Bouwaanduiding,BP:Functieaanduiding,\
PP:ProvinciaalPlangebied,PP:ProvinciaalGebied,PP:ProvinciaalComplex,\
PP:ProvinciaalVerbinding,NP:NationaalPlangebied,XGB:Besluitvlak,XGB:Besluitsubvlak,\
XGB:Exploitatieplangebied,XGB:Gerechtelijkeuitspraakgebied,XGB:Projectbesluitgebied,\
XGB:Tijdelijkeontheffingbuitenplansgebied,XGB:Voorbereidingsbesluitgebied,PCP:Plangebied";
}
return conf;
},
/**
* Called when the button is clicked. Opens the print window (if not already opened) and creates a form.
* If the window was invisible the preview will be redrawn
*/
buttonClick: function(){
if(!this.popup.popupWin.isVisible()){
this.popup.show();
}
if (this.panel==null){
this.createPanel();
}
},
createPanel: function (){
var me = this;
//creates stores:
this.ownerStore=Ext.create('Ext.data.Store', {
fields: ['code', 'name'],
data: [
{name: "Abcoude", code: "0305"},
{name: "Amersfoort", code: "0307"},
{name: "Baarn", code: "0308"},
{name: "Breukelen", code: "0311"},
{name: "Bunnik", code: "0312"},
{name: "Bunschoten", code: "0313"},
{name: "De Bilt", code: "0310"},
{name: "De Ronde Venen", code: "0736"},
{name: "Eemnes", code: "0317"},
{name: "Houten", code: "0321"},
{name: "IJsselstein", code: "0353"},
{name: "Leusden", code: "0327"},
{name: "Loenen", code: "0329"},
{name: "Lopik", code: "0331"},
{name: "Maarssen", code: "0333"},
{name: "Montfoort", code: "0335"},
{name: "Nieuwegein", code: "0356"},
{name: "Oudewater", code: "0589"},
{name: "Provincie Utrecht", code: "9926"},
{name: "Renswoude", code: "0339"},
{name: "Rhenen", code: "0340"},
{name: "Soest", code: "0342"},
{name: "Stichtse Vecht", code: "1904"},
{name: "Utrecht", code: "0344"},
{name: "Utrechtse Heuvelrug", code: "1581"},
{name: "Veenendaal", code: "0345"},
{name: "Vianen", code: "0620"},
{name: "Wijk bij Duurstede", code: "0352"},
{name: "Woerden", code: "0632"},
{name: "Woudenberg", code: "0351"},
{name: "Zeist", code: "0355"}
]
});
this.typeStore = Ext.create('Ext.data.Store',{
fields: ['key','value']
});
this.statusStore = Ext.create('Ext.data.Store',{
fields: ['key','value']
});
this.docStore;
//create comboboxes
this.ownerCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Eigenaar',
labelAlign: 'top',
store: this.ownerStore,
queryMode: 'local',
displayField: 'name',
valueField: 'code',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.ownerChanged
}
}
});
this.typeCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Plan type',
labelAlign: 'top',
store: this.typeStore,
queryMode: 'local',
displayField: 'value',
valueField: 'key',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.typeChanged
}
}
});
this.statusCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Plan status',
labelAlign: 'top',
store: this.statusStore,
queryMode: 'local',
displayField: 'value',
valueField: 'key',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.statusChanged
}
}
});
var pContainer = Ext.create('Ext.panel.Panel',{
layout: {
type: 'vbox'
},
height: 190,
width: '100%',
items: [{
xtype: 'container',
name: 'planContainerValues',
id: "planContainerValues",
autoScroll: true,
height: 190,
width: '100%'
}]
});
var docContainer = Ext.create('Ext.panel.Panel',{
layout: {
type: 'vbox'
},
height: 100,
width: '100%',
items: [{
xtype: 'container',
name: 'docContainerValues',
id: "docContainerValues",
autoScroll: true,
height: 100,
width: '100%'
}]
});
this.legendaButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Legenda",
style: {
fontWeight: 'bold',
cursor: 'pointer'
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.showToc();
}
}
});
this.drawCommentButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Teken commentaar",
style: {
fontWeight: 'bold',
cursor: 'pointer',
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.drawComment();
}
},
hidden: true
});
this.showAllCommentButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Toon alle commentaar",
style: {
fontWeight: 'bold',
cursor: 'pointer',
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.showAllComment();
}
},
hidden: true
});
this.selectedPlanContainer = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Geen plan geselecteerd",
});
//create panel
this.panel = Ext.create('Ext.panel.Panel', {
layout: {
type: 'vbox',
align: 'stretch'
},
padding: 5,
width: "100%",
height: '100%',
border: 0,
renderTo: me.getContentDiv(),
items: [
this.ownerCombo,
this.typeCombo,
this.statusCombo,
{
xtype: 'label',
text: 'Plannen:'
},
pContainer,
{
xtype: 'label',
text: 'Documenten:'
},
docContainer,
this.legendaButton,
this.drawCommentButton,
this.showAllCommentButton,
this.selectedPlanContainer,
{
xtype: "container",
html: "<a id='linkForVerwerk' href='javascript:void(0)' style='visibility:hidden;position:absolute;'></a>",
style: {
visibility: "hidden"
}
}
]
});
},
/**
* Changed functions:
*/
ownerChanged: function(obj,value){
this.setSelectedPlan(null);
this.panel.setLoading("Bezig met laden plannen");
Ext.Ajax.request({
url: this.roServiceUrl,
timeout: 240000,
scope:this,
params: {
overheidsCode: value
},
success: function ( result, request ) {
var res = Ext.JSON.decode(result.responseText);
if(res.success){
this.setPlans(res.results);
}else{
Ext.MessageBox.alert('Foutmelding', "Fout bij laden plannen" + res.error);
}
this.panel.setLoading(false);
},
failure: function ( result, request) {
Ext.MessageBox.alert('Foutmelding', "Fout bij ophalen plannen" + result.responseText);
this.panel.setLoading(false);
}
});
},
typeChanged: function(obj,value){
this.setSelectedPlan(null);
var plans= this.filterCurrentPlans(value);
var uniqueStatus = this.getUniqueStatus(plans);
this.setStatus(uniqueStatus);
this.updatePlansContainer(plans);
},
statusChanged: function(obj,value){
this.setSelectedPlan(null);
var typeValue= this.typeCombo.getValue();
var plans= this.filterCurrentPlans(typeValue,value);
this.updatePlansContainer(plans);
},
/**
* Set the loaded plans.
* @param {object} plans a object array with plans.
*/
setPlans: function (plans){
this.currentPlans = plans;
this.updatePlansContainer(plans);
var uniqueTypes = this.getUniqueType(this.currentPlans);
var uniqueStatus = this.getUniqueStatus(this.currentPlans);
this.setTypes(uniqueTypes);
this.setStatus(uniqueStatus);
},
/**
* Update the container with the list of plans.
* @param {object} plans a object array with plans.
*/
updatePlansContainer: function(plans){
if (this.planContainer===null){
this.planContainer = Ext.getCmp("planContainerValues");
}
this.planContainer.removeAll();
for (var i=0; i < plans.length; i++){
var plan = plans[i];
var el=this.createPlanItem(plan);
this.planContainer.add(el);
}
this.planContainer.doLayout();
},
/**
* Get a unique list of values of a property
* @param {object} plans a object of the plans
* @param {String} a string that represents the name of the property
*/
getUniqueValues: function (plans,property){
var uniqueStatus=[];
for (var i=0; i < plans.length; i++){
var plan = plans[i];
if (plan[property] &&
!Ext.Array.contains(uniqueStatus,plan[property])){
uniqueStatus.push(plan[property]);
}
}
return uniqueStatus;
},
getUniqueStatus: function(plans){
return this.getUniqueValues(plans,"planstatus");
},
getUniqueType: function(plans){
return this.getUniqueValues(plans,"typePlan");
},
/**
* Set the available types
*/
setTypes: function(types){
var values= [];
for (var i=0; i < types.length; i ++){
values.push({key: types[i],value : types[i]})
}
this.typeStore.loadData(values,false);
},
setStatus: function(status){
var values= [];
for (var i=0; i < status.length; i ++){
values.push({key: status[i],value : status[i]})
}
this.statusStore.loadData(values,false);
},
/**
* Called when plan is clicked
*/
onPlanClicked: function(plan){
this.setSelectedPlan(plan);
},
setSelectedPlan: function(plan){
this.selectedPlan = plan;
if (this.selectedPlan==null){
this.clearLayer();
this.selectedPlanContainer.update("Geen plan geselecteerd");
this.drawCommentButton.hide();
this.showAllCommentButton.hide();
this.setPlanCommentFilter(null);
}else{
if(plan.origin == 'Tercera' && plan.wms==undefined){
var me=this;
var id=plan.identificatie;
Ext.MessageBox.confirm({titel:'Verwerk plan',
msg:'Plan is niet verwerkt, wilt u het plan alsnog verwerken?',
width: 100,
buttons: Ext.Msg.YESNO,
buttonText: {
yes: "Ja",
no: "Nee"
},
fn: function(button,event){
if(button=="yes"){
var username=null;
if (user!=null){
username= encodeURIComponent(user.name);
}
var url= me.terceraRequestPage;
url+= url.indexOf("?">0) ? "&" : "?";
url+="idn="+encodeURIComponent(plan.identificatie);
if (username){
url+="&user="+username;
}
var link = document.getElementById("linkForVerwerk");
link.target = "_parent";
link.href = url;
link.click();
}
}
});
}else {
var prePlanText="";
var ogcProps={
exceptions: "application/vnd.ogc.se_inimage",
srs: "EPSG:28992",
version: "1.1.1",
styles: "",
format: "image/png",
transparent: true,
noCache: true,
};
var options={id: "RoTerceraLayer"};
if (plan.origin == 'Tercera'){
prePlanText = "(L0K) ";
Ext.Ajax.request({
url: this.roServiceUrl,
timeout: 240000,
scope:this,
params: {
wmsUrl: plan.wms,
getTerceraWMSLayers: 'b'
},
success: function ( result, request ) {
var res = Ext.JSON.decode(result.responseText);
if(res.success){
ogcProps.layers=res.layers;
ogcProps.query_layers=res.layers;
options.layers=res.layers;
}else{
Ext.MessageBox.alert('Foutmelding', "Fout bij laden plannen" + res.error);
}
this.setLayer(plan.wms,ogcProps,options);
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
},
failure: function ( result, request) {
Ext.MessageBox.alert('Foutmelding', "Fout bij ophalen plannen" + result.responseText);
}
});
}else{ //Ro-online plan
prePlanText="(RO) ";
ogcProps.layers=this.roonlineLayers.split(",");
ogcProps.query_layers=this.roonlineLayers.split(",");
options.layers= this.roonlineLayers.split(",");
if (window.location.hostname ==undefined || window.location.hostname != "localhost"){
ogcProps.sld = Ext.create("viewer.SLD").createURL(options.layers,null,null,null,null,"app:plangebied='"+plan.identificatie+"'");
}
this.setLayer(this.roonlineServiceUrl,ogcProps,options);
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
}
this.selectedPlanContainer.update(prePlanText+this.selectedPlan.identificatie);
}
this.setPlanCommentFilter(plan.identificatie);
if (plan.bbox){
var map=this.viewerController.mapComponent.getMap();
map.zoomToExtent(new viewer.viewercontroller.controller.Extent(plan.bbox.minx,plan.bbox.miny,plan.bbox.maxx,plan.bbox.maxy));
}
if (plan.verwijzingNaarTekst){
var docs = plan.verwijzingNaarTekst.split(",");
this.setDocs(docs);
}
this.drawCommentButton.show();
this.showAllCommentButton.show();
}
},
setDocs: function (docs){
if (this.docContainer === null){
this.docContainer = Ext.getCmp("docContainerValues");
}
this.docContainer.removeAll();
Ext.Array.forEach(docs, function(item,index){
var key=item.replace(" ","");
var value=key;
var name=this.getDocType(key);
var el={
xtype: 'container',
html: name,
width: '100%',
listeners:{
element: 'el',
click: function(){
window.open(value,key.replace(/ /g,"_"),{});
}
},
style: {
cursor: 'pointer'
}
};
this.docContainer.add(el);
},this);
},
setPlanCommentFilter: function(planId){
if (planId==null && this.planCommentFilter!==null){
this.viewerController.removeFilter(this.planCommentFilter.id,this.getCommentAppLayer());
this.planCommentFilter=null;
this.commentMapLayer.setVisible(false);
}else if (planId !=null){
this.planCommentFilter = Ext.create("viewer.components.CQLFilterWrapper",{
id: "planFilter_"+this.getName(),
cql: this.roComment.planIdAttributeName+"='"+planId+"'",
operator : "AND",
type: "ATTRIBUTE"
});
this.viewerController.setFilter(this.planCommentFilter,this.getCommentAppLayer());
this.commentMapLayer.setVisible(true);
}
},
getCommentAppLayer: function(){
if (this.commentAppLayer==null || this.commentAppLayer == undefined){
if (this.layers){
this.commentAppLayer = this.viewerController.getAppLayerById(this.layers[0]);
if (this.commentAppLayer){
this.commentMapLayer = this.viewerController.getOrCreateLayer(this.commentAppLayer);
var cql = "("+this.roComment.publicAttributeName+"=true"
if (user){
cql+= " OR "+this.roComment.ownerAttributeName+ "='"+user+"'";
}
cql+=")"
this.publicCommentfilter = Ext.create("viewer.components.CQLFilterWrapper",{
id: "publicFilter_"+this.getName(),
cql: cql,
operator : "AND",
type: "ATTRIBUTE"
});
this.viewerController.setFilter(this.publicCommentfilter,this.commentAppLayer);
}
}
}
return this.commentAppLayer;
},
showToc: function(){
this.roToc.show();
if (this.selectedPlan){
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
}
},
drawComment: function(){
if(this.selectedPlan && this.selectedPlan.identificatie){
this.roComment.newComment(this.selectedPlan.identificatie);
}
},
showAllComment: function(){
this.roAllComment.getAllComments(this.selectedPlan.identificatie);
},
/**
* Load layer in map
*/
setLayer: function (url,props,options){
var index =-1;
if (this.wmsLayer!=null){
index = this.viewerController.mapComponent.getMap().getLayerIndex(this.wmsLayer);
}else if (this.layers){
index = this.viewerController.mapComponent.getMap().getLayerIndex(this.commentMapLayer);
//index--;
}
this.clearLayer();
this.wmsLayer = this.viewerController.mapComponent.createWMSLayer("rolayer", url ,props, options,this.viewerController);
this.wmsLayer.setDetails({
"summary.description" : "Omschrijving lalala",
"summary.link": "",
"summary.image": "",
"summary.title": ""
});
this.viewerController.mapComponent.getMap().addLayer(this.wmsLayer);
if (index>=0){
this.viewerController.mapComponent.getMap().setLayerIndex(this.wmsLayer,index);
this.viewerController.mapComponent.getMap().setLayerIndex(this.commentMapLayer,index+1);
}
},
clearLayer: function (){
if (this.wmsLayer!=null){
this.viewerController.mapComponent.getMap().removeLayer(this.wmsLayer);
delete this.wmsLayer;
}
},
/**
* Filter the plans
*/
filterCurrentPlans: function (type,status){
var plans=[];
for (var i=0; i < this.currentPlans.length; i++){
var plan = this.currentPlans[i];
var cmp = Ext.getCmp(this.currentPlans[i].identificatie);
var filtered=false;
if (type && plan.typePlan != type){
filtered=true;
}
if (status && plan.planstatus != status){
filtered=true;
}
if (!filtered){
plans.push(plan);
}
}
return plans;
},
/**
* Create a plan item.
*/
createPlanItem: function(planObj){
var me=this;
var color="#000000";
if (planObj.origin == "Tercera"){
if (planObj.wms){
color="green";
}else{
color="#888888";
}
}
var el={
xtype: 'container',
id: planObj.identificatie,
html: planObj.naam,
width: '100%',
border: 1,
listeners:{
element: 'el',
click: function(){
me.onPlanClicked(planObj);
}
},
style: {
color: color,
cursor: 'pointer'
}
};
return el;
},
/**
* Get the full type name of the document.
*/
getDocType: function(key){
var returnValue=key;
if (key.toLowerCase().indexOf("r_nl")>=0){
returnValue="Regels";
}else if(key.toLowerCase().indexOf("rb_nl")>=0){
returnValue="Bijlagen bij de regels";
}else if(key.toLowerCase().indexOf("t_nl")>=0){
returnValue="Toelichting";
}else if(key.toLowerCase().indexOf("tb_nl")>=0){
returnValue="Bijlagen bij de toelichting";
}else if(key.toLowerCase().indexOf("i_nl")>=0){
returnValue="Illustratie";
}else if(key.toLowerCase().indexOf("vb_nl")>=0){
returnValue="Vaststellingsbesluit";
}else if(key.toLowerCase().indexOf("v_nl")>=0){
returnValue="Voorschriften";
}else if(key.toLowerCase().indexOf("pt_nl")>=0){
returnValue="Plantekst";
}else if(key.toLowerCase().indexOf("g_nl")>=0){
returnValue="Geleideformulier";
}else if(key.toLowerCase().indexOf("d_nl")>=0){
returnValue="Besluitdocument";
}else if(key.toLowerCase().indexOf("db_nl")>=0){
returnValue="Bijlagen bij besluitdocument";
}else if(key.toLowerCase().indexOf("b_nl")>=0){
returnValue="Beleidstekst/besluittekst";
}else if(key.toLowerCase().indexOf("bb_nl")>=0){
returnValue="Bijlage bij beleidstekst/besluittekst";
}else if(key.toLowerCase().indexOf("p_nl")>=0){
returnValue="Plankaart";
}
return returnValue;
},
getExtComponents: function() {
return [ (this.panel !== null) ? this.panel.getId() : '' ];
}
});
| ro-tercera-component/RoTercera.js | /*
* Copyright (C) 2012-2013 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Ro Tercera component
* @author <a href="mailto:[email protected]">Roy Braam</a>
*/
Ext.define ("viewer.components.RoTercera",{
extend: "viewer.components.Component",
panel: null,
minWidth: 280,
minHeight: 540,
comboWidth: 200,
//stores
ownerStore: null,
typeStore: null,
statusStore: null,
//combo boxes
ownerCombo: null,
typeCombo: null,
statusCombo: null,
docContainer: null,
planContainer: null,
legendaButton: null,
drawCommentButton: null,
showAllCommentButton: null,
selectedPlanContainer: null,
currentPlans:null,
selectedPlan:null,
wmsLayer: null,
roToc: null,
roComment: null,
publicCommentfilter: null,
planCommentFilter: null,
commentAppLayer: null,
commentMapLayer: null,
commentLayerIndex: null,
config:{
name: "Ro-Tercera client",
title: "",
titlebarIcon : "",
tooltip : "",
label: "",
//TODO: make configurable
roServiceUrl: "",
terceraRequestPage: "https://tercera.provincie-utrecht.nl/RequestPage.aspx",
roonlineLayers: null,
roonlineServiceUrl: null,
layers: null
},
/**
* @constructor
* creating a Ro tercera component
*/
constructor: function (conf){
conf=this.setDefaults(conf);
var resourceUrl = "";
if (actionBeans && actionBeans["componentresource"]){
resourceUrl=actionBeans["componentresource"];
}
resourceUrl=Ext.String.urlAppend(resourceUrl,"className=viewer.components.RoTercera")
conf.iconUrl=Ext.String.urlAppend(resourceUrl,"resource=resources/images/icon16_gray.png");;
viewer.components.RoTercera.superclass.constructor.call(this, conf);
this.initConfig(conf);
var me = this;
if(this.hasButton == null || this.hasButton){
this.renderButton({
handler: function(){
me.buttonClick();
},
text: me.title,
icon: Ext.String.urlAppend(resourceUrl,"resource=resources/images/icon38_gray.png"),
tooltip: me.tooltip,
label: me.label
});
}
//this.test();
this.roToc = Ext.create("viewer.components.rotercera.RoToc",{});
this.roComment = Ext.create("viewer.components.rotercera.RoComment",conf,this);
this.roAllComment = Ext.create("viewer.components.rotercera.RoAllComment",conf,this);
return this;
},
setDefaults: function(conf){
//set minWidth:
if(conf.details.width < this.minWidth || !Ext.isDefined(conf.details.width)) conf.details.width = this.minWidth;
//set minHeight:
if(conf.details.height < this.minHeight || !Ext.isDefined(conf.details.height)) conf.details.height = this.minHeight;
if (Ext.isEmpty(conf.roonlineServiceUrl)){
conf.roonlineServiceUrl="http://afnemers.ruimtelijkeplannen.nl/afnemers/services";
}if (Ext.isEmpty(conf.roonlineLayers)){
conf.roonlineLayers="BP:Bestemmingsplangebied,BP:Wijzigingsplangebied,BP:Enkelbestemming,\
BP:Figuur,BP:Lettertekenaanduiding,BP:Maatvoering,BP:Dubbelbestemming,BP:Bouwvlak,\
BP:Gebiedsaanduiding,BP:Inpassingsplangebied,BP:Bouwaanduiding,BP:Functieaanduiding,\
PP:ProvinciaalPlangebied,PP:ProvinciaalGebied,PP:ProvinciaalComplex,\
PP:ProvinciaalVerbinding,NP:NationaalPlangebied,XGB:Besluitvlak,XGB:Besluitsubvlak,\
XGB:Exploitatieplangebied,XGB:Gerechtelijkeuitspraakgebied,XGB:Projectbesluitgebied,\
XGB:Tijdelijkeontheffingbuitenplansgebied,XGB:Voorbereidingsbesluitgebied,PCP:Plangebied";
}
return conf;
},
/**
* Called when the button is clicked. Opens the print window (if not already opened) and creates a form.
* If the window was invisible the preview will be redrawn
*/
buttonClick: function(){
if(!this.popup.popupWin.isVisible()){
this.popup.show();
}
if (this.panel==null){
this.createPanel();
}
},
createPanel: function (){
var me = this;
//creates stores:
this.ownerStore=Ext.create('Ext.data.Store', {
fields: ['code', 'name'],
data: [
{name: "Abcoude", code: "0305"},
{name: "Amersfoort", code: "0307"},
{name: "Baarn", code: "0308"},
{name: "Breukelen", code: "0311"},
{name: "Bunnik", code: "0312"},
{name: "Bunschoten", code: "0313"},
{name: "De Bilt", code: "0310"},
{name: "De Ronde Venen", code: "0736"},
{name: "Eemnes", code: "0317"},
{name: "Houten", code: "0321"},
{name: "IJsselstein", code: "0353"},
{name: "Leusden", code: "0327"},
{name: "Loenen", code: "0329"},
{name: "Lopik", code: "0331"},
{name: "Maarssen", code: "0333"},
{name: "Montfoort", code: "0335"},
{name: "Nieuwegein", code: "0356"},
{name: "Oudewater", code: "0589"},
{name: "Provincie Utrecht", code: "9926"},
{name: "Renswoude", code: "0339"},
{name: "Rhenen", code: "0340"},
{name: "Soest", code: "0342"},
{name: "Stichtse Vecht", code: "1904"},
{name: "Utrecht", code: "0344"},
{name: "Utrechtse Heuvelrug", code: "1581"},
{name: "Veenendaal", code: "0345"},
{name: "Vianen", code: "0620"},
{name: "Wijk bij Duurstede", code: "0352"},
{name: "Woerden", code: "0632"},
{name: "Woudenberg", code: "0351"},
{name: "Zeist", code: "0355"}
]
});
this.typeStore = Ext.create('Ext.data.Store',{
fields: ['key','value']
});
this.statusStore = Ext.create('Ext.data.Store',{
fields: ['key','value']
});
this.docStore;
//create comboboxes
this.ownerCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Eigenaar',
labelAlign: 'top',
store: this.ownerStore,
queryMode: 'local',
displayField: 'name',
valueField: 'code',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.ownerChanged
}
}
});
this.typeCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Plan type',
labelAlign: 'top',
store: this.typeStore,
queryMode: 'local',
displayField: 'value',
valueField: 'key',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.typeChanged
}
}
});
this.statusCombo = Ext.create('viewer.components.FlamingoCombobox', {
fieldLabel: 'Plan status',
labelAlign: 'top',
store: this.statusStore,
queryMode: 'local',
displayField: 'value',
valueField: 'key',
width: this.comboWidth,
listeners: {
change:{
scope: this,
fn: this.statusChanged
}
}
});
var pContainer = Ext.create('Ext.panel.Panel',{
layout: {
type: 'vbox'
},
height: 190,
width: '100%',
items: [{
xtype: 'container',
name: 'planContainerValues',
id: "planContainerValues",
autoScroll: true,
height: 190,
width: '100%'
}]
});
var docContainer = Ext.create('Ext.panel.Panel',{
layout: {
type: 'vbox'
},
height: 100,
width: '100%',
items: [{
xtype: 'container',
name: 'docContainerValues',
id: "docContainerValues",
autoScroll: true,
height: 100,
width: '100%'
}]
});
this.legendaButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Legenda",
style: {
fontWeight: 'bold',
cursor: 'pointer'
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.showToc();
}
}
});
this.drawCommentButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Teken commentaar",
style: {
fontWeight: 'bold',
cursor: 'pointer',
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.drawComment();
}
},
hidden: true
});
this.showAllCommentButton = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Toon alle commentaar",
style: {
fontWeight: 'bold',
cursor: 'pointer',
},
listeners:{
element: 'el',
scope: this,
click: function(){
this.showAllComment();
}
},
hidden: true
});
this.selectedPlanContainer = Ext.create('Ext.container.Container',{
xtype: "container",
html: "Geen plan geselecteerd",
});
//create panel
this.panel = Ext.create('Ext.panel.Panel', {
layout: {
type: 'vbox',
align: 'stretch'
},
padding: 5,
width: "100%",
height: '100%',
border: 0,
renderTo: me.getContentDiv(),
items: [
this.ownerCombo,
this.typeCombo,
this.statusCombo,
{
xtype: 'label',
text: 'Plannen:'
},
pContainer,
{
xtype: 'label',
text: 'Documenten:'
},
docContainer,
this.legendaButton,
this.drawCommentButton,
this.showAllCommentButton,
this.selectedPlanContainer,
{
xtype: "container",
html: "<a id='linkForVerwerk' href='javascript:void(0)' style='visibility:hidden;position:absolute;'></a>",
style: {
visibility: "hidden"
}
}
]
});
},
/**
* Changed functions:
*/
ownerChanged: function(obj,value){
this.setSelectedPlan(null);
this.panel.setLoading("Bezig met laden plannen");
Ext.Ajax.request({
url: this.roServiceUrl,
timeout: 240000,
scope:this,
params: {
overheidsCode: value
},
success: function ( result, request ) {
var res = Ext.JSON.decode(result.responseText);
if(res.success){
this.setPlans(res.results);
}else{
Ext.MessageBox.alert('Foutmelding', "Fout bij laden plannen" + res.error);
}
this.panel.setLoading(false);
},
failure: function ( result, request) {
Ext.MessageBox.alert('Foutmelding', "Fout bij ophalen plannen" + result.responseText);
this.panel.setLoading(false);
}
});
},
typeChanged: function(obj,value){
this.setSelectedPlan(null);
var plans= this.filterCurrentPlans(value);
var uniqueStatus = this.getUniqueStatus(plans);
this.setStatus(uniqueStatus);
this.updatePlansContainer(plans);
},
statusChanged: function(obj,value){
this.setSelectedPlan(null);
var typeValue= this.typeCombo.getValue();
var plans= this.filterCurrentPlans(typeValue,value);
this.updatePlansContainer(plans);
},
/**
* Set the loaded plans.
* @param {object} plans a object array with plans.
*/
setPlans: function (plans){
this.currentPlans = plans;
this.updatePlansContainer(plans);
var uniqueTypes = this.getUniqueType(this.currentPlans);
var uniqueStatus = this.getUniqueStatus(this.currentPlans);
this.setTypes(uniqueTypes);
this.setStatus(uniqueStatus);
},
/**
* Update the container with the list of plans.
* @param {object} plans a object array with plans.
*/
updatePlansContainer: function(plans){
if (this.planContainer===null){
this.planContainer = Ext.getCmp("planContainerValues");
}
this.planContainer.removeAll();
for (var i=0; i < plans.length; i++){
var plan = plans[i];
var el=this.createPlanItem(plan);
this.planContainer.add(el);
}
this.planContainer.doLayout();
},
/**
* Get a unique list of values of a property
* @param {object} plans a object of the plans
* @param {String} a string that represents the name of the property
*/
getUniqueValues: function (plans,property){
var uniqueStatus=[];
for (var i=0; i < plans.length; i++){
var plan = plans[i];
if (plan[property] &&
!Ext.Array.contains(uniqueStatus,plan[property])){
uniqueStatus.push(plan[property]);
}
}
return uniqueStatus;
},
getUniqueStatus: function(plans){
return this.getUniqueValues(plans,"planstatus");
},
getUniqueType: function(plans){
return this.getUniqueValues(plans,"typePlan");
},
/**
* Set the available types
*/
setTypes: function(types){
var values= [];
for (var i=0; i < types.length; i ++){
values.push({key: types[i],value : types[i]})
}
this.typeStore.loadData(values,false);
},
setStatus: function(status){
var values= [];
for (var i=0; i < status.length; i ++){
values.push({key: status[i],value : status[i]})
}
this.statusStore.loadData(values,false);
},
/**
* Called when plan is clicked
*/
onPlanClicked: function(plan){
this.setSelectedPlan(plan);
},
setSelectedPlan: function(plan){
this.selectedPlan = plan;
if (this.selectedPlan==null){
this.clearLayer();
this.selectedPlanContainer.update("Geen plan geselecteerd");
this.drawCommentButton.hide();
this.showAllCommentButton.hide();
this.setPlanCommentFilter(null);
}else{
if(plan.origin == 'Tercera' && plan.wms==undefined){
var me=this;
var id=plan.identificatie;
Ext.MessageBox.confirm({titel:'Verwerk plan',
msg:'Plan is niet verwerkt, wilt u het plan alsnog verwerken?',
width: 100,
buttons: Ext.Msg.YESNO,
buttonText: {
yes: "Ja",
no: "Nee"
},
fn: function(button,event){
if(button=="yes"){
var username=null;
if (user!=null){
username= encodeURIComponent(user.name);
}
var url= me.terceraRequestPage;
url+= url.indexOf("?">0) ? "&" : "?";
url+="idn="+encodeURIComponent(plan.identificatie);
if (username){
url+="&user="+username;
}
var link = document.getElementById("linkForVerwerk");
link.target = "_parent";
link.href = url;
link.click();
}
}
});
}else {
var prePlanText="";
var ogcProps={
exceptions: "application/vnd.ogc.se_inimage",
srs: "EPSG:28992",
version: "1.1.1",
styles: "",
format: "image/png",
transparent: true,
noCache: true
};
var options={};
if (plan.origin == 'Tercera'){
prePlanText = "(L0K) ";
Ext.Ajax.request({
url: this.roServiceUrl,
timeout: 240000,
scope:this,
params: {
wmsUrl: plan.wms,
getTerceraWMSLayers: 'b'
},
success: function ( result, request ) {
var res = Ext.JSON.decode(result.responseText);
if(res.success){
ogcProps.layers=res.layers;
ogcProps.query_layers=res.layers;
options.layers=res.layers;
}else{
Ext.MessageBox.alert('Foutmelding', "Fout bij laden plannen" + res.error);
}
this.setLayer(plan.wms,ogcProps,options);
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
},
failure: function ( result, request) {
Ext.MessageBox.alert('Foutmelding', "Fout bij ophalen plannen" + result.responseText);
}
});
}else{ //Ro-online plan
prePlanText="(RO) ";
ogcProps.layers=this.roonlineLayers.split(",");
ogcProps.query_layers=this.roonlineLayers.split(",");
options.layers= this.roonlineLayers.split(",");
if (window.location.hostname ==undefined || window.location.hostname != "localhost"){
ogcProps.sld = Ext.create("viewer.SLD").createURL(options.layers,null,null,null,null,"app:plangebied='"+plan.identificatie+"'");
}
this.setLayer(this.roonlineServiceUrl,ogcProps,options);
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
}
this.selectedPlanContainer.update(prePlanText+this.selectedPlan.identificatie);
}
this.setPlanCommentFilter(plan.identificatie);
if (plan.bbox){
var map=this.viewerController.mapComponent.getMap();
map.zoomToExtent(new viewer.viewercontroller.controller.Extent(plan.bbox.minx,plan.bbox.miny,plan.bbox.maxx,plan.bbox.maxy));
}
if (plan.verwijzingNaarTekst){
var docs = plan.verwijzingNaarTekst.split(",");
this.setDocs(docs);
}
this.drawCommentButton.show();
this.showAllCommentButton.show();
}
},
setDocs: function (docs){
if (this.docContainer === null){
this.docContainer = Ext.getCmp("docContainerValues");
}
this.docContainer.removeAll();
Ext.Array.forEach(docs, function(item,index){
var key=item.replace(" ","");
var value=key;
var name=this.getDocType(key);
var el={
xtype: 'container',
html: name,
width: '100%',
listeners:{
element: 'el',
click: function(){
window.open(value,key.replace(/ /g,"_"),{});
}
},
style: {
cursor: 'pointer'
}
};
this.docContainer.add(el);
},this);
},
setPlanCommentFilter: function(planId){
if (planId==null && this.planCommentFilter!==null){
this.viewerController.removeFilter(this.planCommentFilter.id,this.getCommentAppLayer());
this.planCommentFilter=null;
this.commentMapLayer.setVisible(false);
}else if (planId !=null){
this.planCommentFilter = Ext.create("viewer.components.CQLFilterWrapper",{
id: "planFilter_"+this.getName(),
cql: this.roComment.planIdAttributeName+"='"+planId+"'",
operator : "AND",
type: "ATTRIBUTE"
});
this.viewerController.setFilter(this.planCommentFilter,this.getCommentAppLayer());
this.commentMapLayer.setVisible(true);
}
},
getCommentAppLayer: function(){
if (this.commentAppLayer==null || this.commentAppLayer == undefined){
if (this.layers){
this.commentAppLayer = this.viewerController.getAppLayerById(this.layers[0]);
if (this.commentAppLayer){
this.commentMapLayer = this.viewerController.getOrCreateLayer(this.commentAppLayer);
var cql = "("+this.roComment.publicAttributeName+"=true"
if (user){
cql+= " OR "+this.roComment.ownerAttributeName+ "='"+user+"'";
}
cql+=")"
this.publicCommentfilter = Ext.create("viewer.components.CQLFilterWrapper",{
id: "publicFilter_"+this.getName(),
cql: cql,
operator : "AND",
type: "ATTRIBUTE"
});
this.viewerController.setFilter(this.publicCommentfilter,this.commentAppLayer);
}
}
}
return this.commentAppLayer;
},
showToc: function(){
this.roToc.show();
if (this.selectedPlan){
this.roToc.reset({
type: this.selectedPlan.origin,
planId: this.selectedPlan.identificatie,
wmsLayer: this.wmsLayer
});
}
},
drawComment: function(){
if(this.selectedPlan && this.selectedPlan.identificatie){
this.roComment.newComment(this.selectedPlan.identificatie);
}
},
showAllComment: function(){
this.roAllComment.getAllComments(this.selectedPlan.identificatie);
},
/**
* Load layer in map
*/
setLayer: function (url,props,options){
var index =-1;
if (this.wmsLayer!=null){
index = this.viewerController.mapComponent.getMap().getLayerIndex(this.wmsLayer);
}else if (this.layers){
index = this.viewerController.mapComponent.getMap().getLayerIndex(this.commentMapLayer);
//index--;
}
this.clearLayer();
this.wmsLayer = this.viewerController.mapComponent.createWMSLayer("rolayer", url ,props, options,this.viewerController);
this.wmsLayer.setDetails({
"summary.description" : "Omschrijving lalala",
"summary.link": "",
"summary.image": "",
"summary.title": ""
});
this.viewerController.mapComponent.getMap().addLayer(this.wmsLayer);
if (index>=0){
this.viewerController.mapComponent.getMap().setLayerIndex(this.wmsLayer,index);
this.viewerController.mapComponent.getMap().setLayerIndex(this.commentMapLayer,index+1);
}
},
clearLayer: function (){
if (this.wmsLayer!=null){
this.viewerController.mapComponent.getMap().removeLayer(this.wmsLayer);
delete this.wmsLayer;
}
},
/**
* Filter the plans
*/
filterCurrentPlans: function (type,status){
var plans=[];
for (var i=0; i < this.currentPlans.length; i++){
var plan = this.currentPlans[i];
var cmp = Ext.getCmp(this.currentPlans[i].identificatie);
var filtered=false;
if (type && plan.typePlan != type){
filtered=true;
}
if (status && plan.planstatus != status){
filtered=true;
}
if (!filtered){
plans.push(plan);
}
}
return plans;
},
/**
* Create a plan item.
*/
createPlanItem: function(planObj){
var me=this;
var color="#000000";
if (planObj.origin == "Tercera"){
if (planObj.wms){
color="green";
}else{
color="#888888";
}
}
var el={
xtype: 'container',
id: planObj.identificatie,
html: planObj.naam,
width: '100%',
border: 1,
listeners:{
element: 'el',
click: function(){
me.onPlanClicked(planObj);
}
},
style: {
color: color,
cursor: 'pointer'
}
};
return el;
},
/**
* Get the full type name of the document.
*/
getDocType: function(key){
var returnValue=key;
if (key.toLowerCase().indexOf("r_nl")>=0){
returnValue="Regels";
}else if(key.toLowerCase().indexOf("rb_nl")>=0){
returnValue="Bijlagen bij de regels";
}else if(key.toLowerCase().indexOf("t_nl")>=0){
returnValue="Toelichting";
}else if(key.toLowerCase().indexOf("tb_nl")>=0){
returnValue="Bijlagen bij de toelichting";
}else if(key.toLowerCase().indexOf("i_nl")>=0){
returnValue="Illustratie";
}else if(key.toLowerCase().indexOf("vb_nl")>=0){
returnValue="Vaststellingsbesluit";
}else if(key.toLowerCase().indexOf("v_nl")>=0){
returnValue="Voorschriften";
}else if(key.toLowerCase().indexOf("pt_nl")>=0){
returnValue="Plantekst";
}else if(key.toLowerCase().indexOf("g_nl")>=0){
returnValue="Geleideformulier";
}else if(key.toLowerCase().indexOf("d_nl")>=0){
returnValue="Besluitdocument";
}else if(key.toLowerCase().indexOf("db_nl")>=0){
returnValue="Bijlagen bij besluitdocument";
}else if(key.toLowerCase().indexOf("b_nl")>=0){
returnValue="Beleidstekst/besluittekst";
}else if(key.toLowerCase().indexOf("bb_nl")>=0){
returnValue="Bijlage bij beleidstekst/besluittekst";
}else if(key.toLowerCase().indexOf("p_nl")>=0){
returnValue="Plankaart";
}
return returnValue;
},
getExtComponents: function() {
return [ (this.panel !== null) ? this.panel.getId() : '' ];
}
});
| add id to Map Layer
 | ro-tercera-component/RoTercera.js | add id to Map Layer | <ide><path>ro-tercera-component/RoTercera.js
<ide> styles: "",
<ide> format: "image/png",
<ide> transparent: true,
<del> noCache: true
<add> noCache: true,
<ide> };
<del> var options={};
<add> var options={id: "RoTerceraLayer"};
<ide> if (plan.origin == 'Tercera'){
<ide> prePlanText = "(L0K) ";
<ide> Ext.Ajax.request({ |
|
Java | lgpl-2.1 | 99a707c076bb4e4fb421335d4935dc5afcf537d9 | 0 | RomanHargrave/cuelib | /*
* Cuelib library for manipulating cue sheets.
* Copyright (C) 2007-2008 Jan-Willem van den Broek
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package jwbroek.cuelib.tools.trackcutter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import jwbroek.cuelib.CueParser;
import jwbroek.cuelib.CueSheet;
import jwbroek.cuelib.FileData;
import jwbroek.cuelib.Position;
import jwbroek.cuelib.TrackData;
import jwbroek.io.StreamPiper;
import jwbroek.util.LogUtil;
/**
* <p>Class that can cut up files into tracks, based on the information provided by a cue sheet.</p>
* <p>It can do some audio type conversions, file naming based on information in the cue sheet, and
 * offers the option of having the tracks post-processed by another application based on information
* in the cue sheet.</p>
* @author jwbroek
*/
public class TrackCutter
{
/**
* The logger for this class.
*/
private final static Logger logger = Logger.getLogger(TrackCutter.class.getCanonicalName());
/**
   * Configuration for the TrackCutter.
*/
private TrackCutterConfiguration configuration;
/**
* Create a new TrackCutter instance, based on the configuration provided.
* @param configuration
*/
public TrackCutter(final TrackCutterConfiguration configuration)
{
TrackCutter.logger.entering
(TrackCutter.class.getCanonicalName(), "TrackCutter(TrackCutterConfiguration)", configuration);
this.configuration = configuration;
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "TrackCutter(TrackCutterConfiguration)");
}
/**
   * Cut the files specified in the cue sheet into tracks.
* @param cueFile
* @throws IOException
*/
public void cutTracksInCueSheet(final File cueFile) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(File)", cueFile);
TrackCutter.logger.info("Cutting tracks in cue sheet from file '" + cueFile.toString() + "'.");
CueSheet cueSheet = null;
    // If no parent directory was specified, use the parent directory of the cue file.
if (getConfiguration().getParentDirectory()==null)
{
getConfiguration().setParentDirectory(cueFile.getParentFile());
TrackCutter.logger.fine("Have set base directory to directory of File '" + cueFile.toString() + "'.");
}
try
{
TrackCutter.logger.fine("Parsing cue sheet.");
cueSheet = CueParser.parse(cueFile);
}
catch (IOException e)
{
TrackCutter.logger.severe("Was unable to parse the cue sheet in file '" + cueFile.toString() + "'.");
throw new IOException("Problem parsing cue file.", e);
}
cutTracksInCueSheet(cueSheet);
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(File)");
}
/**
   * Cut the files specified in the cue sheet that will be read from the InputStream into tracks.
* @param inputStream
* @throws IOException
*/
public void cutTracksInCueSheet(final InputStream inputStream) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(InputStream)", inputStream);
TrackCutter.logger.info("Cutting tracks in cue sheet from InputStream.");
CueSheet cueSheet = null;
try
{
TrackCutter.logger.fine("Parsing cue sheet.");
cueSheet = CueParser.parse(inputStream);
}
catch (IOException e)
{
TrackCutter.logger.severe("Was unable to parse the cue sheet from InputStream.");
LogUtil.logStacktrace(TrackCutter.logger, Level.SEVERE, e);
throw new IOException("Problem parsing cue file.", e);
}
cutTracksInCueSheet(cueSheet);
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(InputStream)");
}
/**
   * Cut the files specified in the cue sheet into tracks.
* @param cueSheet
* @throws IOException
*/
public void cutTracksInCueSheet(final CueSheet cueSheet) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(CueSheet)", cueSheet);
TrackCutter.logger.info("Cutting tracks in cue sheet.");
// We can process each file in the cue sheet independently.
for (FileData fileData : cueSheet.getFileData())
{
try
{
cutTracksInFileData(fileData);
}
catch (UnsupportedAudioFileException e)
{
TrackCutter.logger.severe
( "Encountered an " + e.getClass().getCanonicalName()
+ " when processing \"" + fileData.getFile() + "\": " + e.getMessage()
);
LogUtil.logStacktrace(TrackCutter.logger, Level.FINE, e);
}
catch (IOException e)
{
TrackCutter.logger.severe
( "Encountered an " + e.getClass().getCanonicalName()
+ " when processing \"" + fileData.getFile() + "\": " + e.getMessage()
);
LogUtil.logStacktrace(TrackCutter.logger, Level.FINE, e);
}
}
TrackCutter.logger.info("Done cutting tracks in cue sheet.");
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(CueSheet)");
}
/**
   * Cut the files specified in the FileData into tracks.
* @param fileData
* @throws IOException
* @throws UnsupportedAudioFileException
*/
private void cutTracksInFileData(final FileData fileData)
throws IOException, UnsupportedAudioFileException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(FileData)", fileData);
TrackCutter.logger.info("Cutting tracks from file: '" + fileData.getFile() + "'.");
AudioInputStream audioInputStream = null;
try
{
// Determine the complete path to the audio file.
TrackCutter.logger.fine("Determining complete path to audio file.");
File audioFile = getConfiguration().getAudioFile(fileData);
// Open the audio file.
// Sadly, we can't do much with the file type information from the cue sheet, as javax.sound.sampled
// needs more information before it can process a specific type of sound file. Best then to let it
// determine all aspects of the audio type by itself.
TrackCutter.logger.fine("Opening audio stream.");
audioInputStream = AudioSystem.getAudioInputStream(audioFile);
// Current position in terms of the frames as per audioInputStream.getFrameLength().
// Note that these frames need not be equal to cue sheet frames.
long currentAudioFramePos = 0;
// Process tracks.
for (TrackCutterProcessingAction processAction : getProcessActionList(fileData))
{
currentAudioFramePos = performProcessAction
( processAction
, audioInputStream
, currentAudioFramePos
);
}
}
finally
{
if (audioInputStream!=null)
{
// Don't handle exceptions, as there's really nothing we can do about them.
TrackCutter.logger.fine("Closing audio stream.");
audioInputStream.close();
}
}
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(FileData)");
}
/**
* Get a list of ProcessActions based on the specified FileData.
* @param fileData
* @return A list of ProcessActions based on the specified FileData.
*/
private List<TrackCutterProcessingAction> getProcessActionList(final FileData fileData)
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "getProcessActionList(FileData)", fileData);
TrackCutter.logger.fine("Determining processing actions for file: '" + fileData.getFile() + "'.");
List<TrackCutterProcessingAction> result = new ArrayList<TrackCutterProcessingAction>();
TrackData previousTrackData = null;
// Process all tracks in turn.
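    // A track ends where the next track begins: use the next track's index 0 (pregap) if present, otherwise its index 1.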
for (TrackData currentTrackData : fileData.getTrackData())
{
if (previousTrackData != null)
{
if (currentTrackData.getIndex(0) != null)
{
addProcessActions(previousTrackData, currentTrackData.getIndex(0).getPosition(), result);
}
else
{
addProcessActions(previousTrackData, currentTrackData.getIndex(1).getPosition(), result);
}
}
previousTrackData = currentTrackData;
}
// Handle last track, if any.
if (previousTrackData != null)
{
addProcessActions(previousTrackData, null, result);
}
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "getProcessActionList(FileData)", result);
return result;
}
/**
   * Add ProcessAction instances for the specified TrackData.
* @param trackData
* @param nextPosition The first position after the current track, or null if there is no next position.
* (Track continues until the end of data.)
* @param processActions A list of ProcessAction instances to which the actions for this TrackData
* will be added.
*/
private void addProcessActions
( final TrackData trackData
, final Position nextPosition
, final List<TrackCutterProcessingAction> processActions
)
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "addProcessActions(TrackData,Position,List<TrackCutterProcessingAction>)"
, new Object[] {trackData, nextPosition, processActions}
);
TrackCutter.logger.fine("Adding processing action for track #" + trackData.getNumber() + ".");
if (trackData.getIndex(0) == null)
{
// No pregap to handle. Just process this track.
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(1).getPosition()
, nextPosition
, trackData
, false
, getConfiguration()
)
);
}
else
{
switch (configuration.getPregapHandling())
{
case DISCARD:
// Discard the pregap, process the track.
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(1).getPosition()
, nextPosition
, trackData
, false
, getConfiguration()
)
);
break;
case PREPEND:
// Prepend the pregap, if long enough.
if ( trackData.getIndex(1).getPosition().getTotalFrames()
- trackData.getIndex(0).getPosition().getTotalFrames()
>= this.getConfiguration().getPregapFrameLengthThreshold()
)
{
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(0).getPosition()
, nextPosition
, trackData
, true
, getConfiguration()
)
);
}
else
{
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(1).getPosition()
, nextPosition
, trackData
, false
, getConfiguration()
)
);
}
break;
case SEPARATE:
// Add pregap and track as separate tracks.
// Prepend the pregap, if long enough.
if ( trackData.getIndex(1).getPosition().getTotalFrames()
- trackData.getIndex(0).getPosition().getTotalFrames()
>= this.getConfiguration().getPregapFrameLengthThreshold()
)
{
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(0).getPosition()
, trackData.getIndex(1).getPosition()
, trackData
, true
, getConfiguration()
)
);
}
processActions.add
( new TrackCutterProcessingAction
( trackData.getIndex(1).getPosition()
, nextPosition
, trackData
, false
, getConfiguration()
)
);
break;
}
}
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "addProcessActions(TrackData,Position,List<TrackCutterProcessingAction>)"
);
}
/**
* Perform the specified ProcessAction.
* @param processAction
* @param audioInputStream The audio stream from which to read.
* @param currentAudioFramePos The current frame position in the audio stream.
* @return The current frame position after processing.
* @throws IOException
*/
private long performProcessAction ( final TrackCutterProcessingAction processAction
, final AudioInputStream audioInputStream
, final long currentAudioFramePos
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream,long)"
, new Object[] {processAction, audioInputStream, currentAudioFramePos}
);
TrackCutter.logger.fine
( "Determining audio substream for processing action for "
+ (processAction.getIsPregap()?"pregap of ":"") + "track #"
+ processAction.getTrackData().getNumber() + "."
);
    // Skip positions in the audioInputStream until we are at our starting position.
long fromAudioFramePos = skipToPosition (processAction.getStartPosition(), audioInputStream, currentAudioFramePos);
// Determine the position to which we should read from the input.
long toAudioFramePos = audioInputStream.getFrameLength();
if (processAction.getEndPosition() != null)
{
toAudioFramePos = getAudioFormatFrames(processAction.getEndPosition(), audioInputStream.getFormat());
}
performProcessAction
( processAction
, new AudioInputStream(audioInputStream, audioInputStream.getFormat(), toAudioFramePos - fromAudioFramePos)
);
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream,long)"
);
return toAudioFramePos;
}
/**
* Perform the specified ProcessAction.
* @param processAction
* @param audioInputStream The audio stream from which to read. This stream will be closed afterward.
* @throws IOException
*/
private void performProcessAction ( final TrackCutterProcessingAction processAction
, final AudioInputStream audioInputStream
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream)"
, new Object[] {processAction, audioInputStream}
);
TrackCutter.logger.info
( "Performing processing action for " + (processAction.getIsPregap()?"pregap of ":"")
+ "track #" + processAction.getTrackData().getNumber() + "."
);
if (!getConfiguration().getRedirectToPostprocessing())
{
// We're going to create target files, so make sure there's a directory for them.
TrackCutter.logger.fine("Creating directory for target files.");
processAction.getCutFile().getParentFile().mkdirs();
}
if (configuration.getDoPostProcessing() && configuration.getRedirectToPostprocessing())
{
OutputStream audioOutputStream = null;
try
{
TrackCutter.logger.fine("Writing audio to postprocessor.");
audioOutputStream = this.createPostProcessingProcess(processAction).getOutputStream();
AudioSystem.write(audioInputStream, configuration.getTargetType(), audioOutputStream);
}
finally
{
if (audioOutputStream!=null)
{
// We can't do anything about any exceptions here, so we don't catch them.
TrackCutter.logger.fine("Closing audio stream.");
audioOutputStream.close();
}
}
}
else
{
TrackCutter.logger.fine("Writing audio to file.");
AudioSystem.write(audioInputStream, configuration.getTargetType(), processAction.getCutFile());
if (configuration.getDoPostProcessing())
{
TrackCutter.logger.fine("Performing postprocessing.");
this.createPostProcessingProcess(processAction);
}
}
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream)"
);
}
/**
* Create the specified post-processing process.
* @param processAction
* @return The specified post-processing process.
* @throws IOException
*/
private Process createPostProcessingProcess
( final TrackCutterProcessingAction processAction
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction)"
, processAction
);
TrackCutter.logger.fine("Creating post-processing process for command: " + processAction.getPostProcessCommand());
processAction.getPostProcessFile().getParentFile().mkdirs();
Process process = Runtime.getRuntime().exec(processAction.getPostProcessCommand());
StreamPiper.pipeStream(process.getInputStream(), processAction.getStdOutRedirectFile());
StreamPiper.pipeStream(process.getErrorStream(), processAction.getErrRedirectFile());
TrackCutter.logger.exiting
(TrackCutter.class.getCanonicalName(), "performProcessAction(TrackCutterProcessingAction)", process);
return process;
}
/**
* Get the number of AudioFormat frames represented by the specified Position. Note that an AudioFormat
* frame may represent a longer or shorter time than a cue sheet frame.
* @param position
   * @param audioFormat
* @return The number of AudioFormat frames represented by the specified Position. Note that an AudioFormat
* frame may represent a longer or shorter time than a cue sheet frame.
*/
private static long getAudioFormatFrames(final Position position, final AudioFormat audioFormat)
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "getAudioFormatFrames(Position,AudioFormat)"
, new Object[] {position, audioFormat}
);
// Determine closest frame number.
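    // Cue sheet positions use 75 frames per second, so convert cue frames to seconds and then to audio frames via the format's frame rate.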
long result = (long) Math.round(((double) audioFormat.getFrameRate())/75 * position.getTotalFrames());
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "getAudioFormatFrames(Position,AudioFormat)"
, result
);
return result;
}
/**
* Skip to the specified position in the audio data.
* @param toPosition The position to skip to.
* @param audioInputStream The audio data to skip in.
* @param currentAudioFramePos The current position in frames in the audio data.
* @return The frame position in the audio data after skipping.
* @throws IOException
*/
private long skipToPosition
( Position toPosition
, AudioInputStream audioInputStream
, long currentAudioFramePos
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "skipToPosition(Position,AudioInputStream,long)"
, new Object[] {toPosition, audioInputStream, currentAudioFramePos}
);
long toAudioFramePos = getAudioFormatFrames(toPosition, audioInputStream.getFormat());
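    // AudioInputStream.skip() counts bytes, so the frame delta is multiplied by the frame size (bytes per frame).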
audioInputStream.skip((toAudioFramePos - currentAudioFramePos) * audioInputStream.getFormat().getFrameSize());
TrackCutter.logger.exiting
(TrackCutter.class.getCanonicalName(), "skipToPosition(Position,AudioInputStream,long)", toAudioFramePos);
return toAudioFramePos;
}
/**
* Get the configuration for this TrackCutter.
* @return The configuration for this TrackCutter.
*/
private TrackCutterConfiguration getConfiguration()
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "getConfiguration()");
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "getConfiguration()", this.configuration);
return this.configuration;
}
}
| src/jwbroek/cuelib/tools/trackcutter/TrackCutter.java | /*
* Cuelib library for manipulating cue sheets.
* Copyright (C) 2007-2008 Jan-Willem van den Broek
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package jwbroek.cuelib.tools.trackcutter;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.UnsupportedAudioFileException;
import jwbroek.cuelib.CueParser;
import jwbroek.cuelib.CueSheet;
import jwbroek.cuelib.FileData;
import jwbroek.cuelib.Position;
import jwbroek.cuelib.TrackData;
import jwbroek.io.StreamPiper;
import jwbroek.util.LogUtil;
/**
* <p>Class that can cut up files into tracks, based on the information provided by a cue sheet.</p>
* <p>It can do some audio type conversions, file naming based on information in the cue sheet, and
 * offers the option of having the tracks post-processed by another application based on information
* in the cue sheet.</p>
* @author jwbroek
*/
public class TrackCutter
{
/**
* The logger for this class.
*/
private final static Logger logger = Logger.getLogger(TrackCutter.class.getCanonicalName());
/**
   * Configuration for the TrackCutter.
*/
private TrackCutterConfiguration configuration;
/**
* Create a new TrackCutter instance, based on the configuration provided.
* @param configuration
*/
public TrackCutter(final TrackCutterConfiguration configuration)
{
TrackCutter.logger.entering
(TrackCutter.class.getCanonicalName(), "TrackCutter(TrackCutterConfiguration)", configuration);
this.configuration = configuration;
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "TrackCutter(TrackCutterConfiguration)");
}
/**
   * Cut the files specified in the cue sheet into tracks.
* @param cueFile
* @throws IOException
*/
public void cutTracksInCueSheet(final File cueFile) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(File)", cueFile);
TrackCutter.logger.info("Cutting tracks in cue sheet from file '" + cueFile.toString() + "'.");
CueSheet cueSheet = null;
// If no parent directory specified, then set the parent directory of the cue file.
if (getConfiguration().getParentDirectory()==null)
{
getConfiguration().setParentDirectory(cueFile.getParentFile());
TrackCutter.logger.fine("Have set base directory to directory of File '" + cueFile.toString() + "'.");
}
try
{
TrackCutter.logger.fine("Parsing cue sheet.");
cueSheet = CueParser.parse(cueFile);
}
catch (IOException e)
{
TrackCutter.logger.severe("Was unable to parse the cue sheet in file '" + cueFile.toString() + "'.");
throw new IOException("Problem parsing cue file.", e);
}
cutTracksInCueSheet(cueSheet);
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(File)");
}
/**
   * Cut the files specified in the cue sheet that will be read from the InputStream into tracks.
* @param inputStream
* @throws IOException
*/
public void cutTracksInCueSheet(final InputStream inputStream) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(InputStream)", inputStream);
TrackCutter.logger.info("Cutting tracks in cue sheet from InputStream.");
CueSheet cueSheet = null;
try
{
TrackCutter.logger.fine("Parsing cue sheet.");
cueSheet = CueParser.parse(inputStream);
}
catch (IOException e)
{
TrackCutter.logger.severe("Was unable to parse the cue sheet from InputStream.");
LogUtil.logStacktrace(TrackCutter.logger, Level.SEVERE, e);
throw new IOException("Problem parsing cue file.", e);
}
cutTracksInCueSheet(cueSheet);
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(InputStream)");
}
/**
   * Cut the files specified in the cue sheet into tracks.
* @param cueSheet
* @throws IOException
*/
public void cutTracksInCueSheet(final CueSheet cueSheet) throws IOException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(CueSheet)", cueSheet);
TrackCutter.logger.info("Cutting tracks in cue sheet.");
// We can process each file in the cue sheet independently.
for (FileData fileData : cueSheet.getFileData())
{
try
{
cutTracksInFileData(fileData);
}
catch (UnsupportedAudioFileException e)
{
logCaughtException(e);
}
catch (IOException e)
{
logCaughtException(e);
}
}
TrackCutter.logger.info("Done cutting tracks in cue sheet.");
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(CueSheet)");
}
/**
   * Cut the files specified in the FileData into tracks.
* @param fileData
* @throws IOException
* @throws UnsupportedAudioFileException
*/
private void cutTracksInFileData(final FileData fileData)
throws IOException, UnsupportedAudioFileException
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(FileData)", fileData);
TrackCutter.logger.info("Cutting tracks from file: '" + fileData.getFile() + "'.");
AudioInputStream audioInputStream = null;
try
{
// Determine the complete path to the audio file.
TrackCutter.logger.fine("Determining complete path to audio file.");
File audioFile = getConfiguration().getAudioFile(fileData);
// Open the audio file.
// Sadly, we can't do much with the file type information from the cue sheet, as javax.sound.sampled
// needs more information before it can process a specific type of sound file. Best then to let it
// determine all aspects of the audio type by itself.
TrackCutter.logger.fine("Opening audio stream.");
audioInputStream = AudioSystem.getAudioInputStream(audioFile);
// Current position in terms of the frames as per audioInputStream.getFrameLength().
// Note that these frames need not be equal to cue sheet frames.
long currentAudioFramePos = 0;
// Process tracks.
for (TrackCutterProcessingAction processAction : getProcessActionList(fileData))
{
currentAudioFramePos = performProcessAction
( processAction
, audioInputStream
, currentAudioFramePos
);
}
}
finally
{
if (audioInputStream!=null)
{
// Don't handle exceptions, as there's really nothing we can do about them.
TrackCutter.logger.fine("Closing audio stream.");
audioInputStream.close();
}
}
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "cutTracksInCueSheet(FileData)");
}
/**
* Get a list of ProcessActions based on the specified FileData.
* @param fileData
* @return A list of ProcessActions based on the specified FileData.
*/
private List<TrackCutterProcessingAction> getProcessActionList(final FileData fileData)
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "getProcessActionList(FileData)", fileData);
TrackCutter.logger.fine("Determining processing actions for file: '" + fileData.getFile() + "'.");
List<TrackCutterProcessingAction> result = new ArrayList<TrackCutterProcessingAction>();
TrackData previousTrackData = null;
// Process all tracks in turn.
for (TrackData currentTrackData : fileData.getTrackData())
{
if (previousTrackData != null)
{
if (currentTrackData.getIndex(0) != null)
{
addProcessActions(previousTrackData, currentTrackData.getIndex(0).getPosition(), result);
}
else
{
addProcessActions(previousTrackData, currentTrackData.getIndex(1).getPosition(), result);
}
}
previousTrackData = currentTrackData;
}
// Handle last track, if any.
if (previousTrackData != null)
{
addProcessActions(previousTrackData, null, result);
}
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "getProcessActionList(FileData)", result);
return result;
}
/**
   * Add ProcessAction instances for the specified TrackData.
* @param trackData
* @param nextPosition The first position after the current track, or null if there is no next position.
* (Track continues until the end of data.)
* @param processActions A list of ProcessAction instances to which the actions for this TrackData
* will be added.
*/
private void addProcessActions
( final TrackData trackData
, final Position nextPosition
, final List<TrackCutterProcessingAction> processActions
)
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "addProcessActions(TrackData,Position,List<TrackCutterProcessingAction>)"
, new Object[] {trackData, nextPosition, processActions}
);
TrackCutter.logger.fine("Adding processing action for track #" + trackData.getNumber() + ".");
if (trackData.getIndex(0) == null)
{
// No pregap to handle. Just process this track.
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
}
else
{
switch (configuration.getPregapHandling())
{
case DISCARD:
// Discard the pregap, process the track.
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
break;
case PREPEND:
// Prepend the pregap, if long enough.
if ( trackData.getIndex(1).getPosition().getTotalFrames()
- trackData.getIndex(0).getPosition().getTotalFrames()
>= this.getConfiguration().getPregapFrameLengthThreshold()
)
{
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(0).getPosition(), nextPosition, trackData, true, getConfiguration()));
}
else
{
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
}
break;
case SEPARATE:
// Add pregap and track as separate tracks.
// Prepend the pregap, if long enough.
if ( trackData.getIndex(1).getPosition().getTotalFrames()
- trackData.getIndex(0).getPosition().getTotalFrames()
>= this.getConfiguration().getPregapFrameLengthThreshold()
)
{
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(0).getPosition(), trackData.getIndex(1).getPosition(), trackData, true, getConfiguration()));
}
processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
break;
}
}
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "addProcessActions(TrackData,Position,List<TrackCutterProcessingAction>)"
);
}
/**
* Perform the specified ProcessAction.
* @param processAction
* @param audioInputStream The audio stream from which to read.
* @param currentAudioFramePos The current frame position in the audio stream.
* @return The current frame position after processing.
* @throws IOException
*/
private long performProcessAction ( final TrackCutterProcessingAction processAction
, final AudioInputStream audioInputStream
, final long currentAudioFramePos
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream,long)"
, new Object[] {processAction, audioInputStream, currentAudioFramePos}
);
TrackCutter.logger.fine
( "Determining audio substream for processing action for "
+ (processAction.getIsPregap()?"pregap of ":"") + "track #"
+ processAction.getTrackData().getNumber() + "."
);
    // Skip positions in the audioInputStream until we are at our starting position.
long fromAudioFramePos = skipToPosition (processAction.getStartPosition(), audioInputStream, currentAudioFramePos);
// Determine the position to which we should read from the input.
long toAudioFramePos = audioInputStream.getFrameLength();
if (processAction.getEndPosition() != null)
{
toAudioFramePos = getAudioFormatFrames(processAction.getEndPosition(), audioInputStream.getFormat());
}
performProcessAction
( processAction
, new AudioInputStream(audioInputStream, audioInputStream.getFormat(), toAudioFramePos - fromAudioFramePos)
);
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream,long)"
);
return toAudioFramePos;
}
/**
* Perform the specified ProcessAction.
* @param processAction
* @param audioInputStream The audio stream from which to read. This stream will be closed afterward.
* @throws IOException
*/
private void performProcessAction ( final TrackCutterProcessingAction processAction
, final AudioInputStream audioInputStream
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream)"
, new Object[] {processAction, audioInputStream}
);
TrackCutter.logger.info
( "Performing processing action for " + (processAction.getIsPregap()?"pregap of ":"")
+ "track #" + processAction.getTrackData().getNumber() + "."
);
if (!getConfiguration().getRedirectToPostprocessing())
{
// We're going to create target files, so make sure there's a directory for them.
TrackCutter.logger.fine("Creating directory for target files.");
processAction.getCutFile().getParentFile().mkdirs();
}
if (configuration.getDoPostProcessing() && configuration.getRedirectToPostprocessing())
{
OutputStream audioOutputStream = null;
try
{
TrackCutter.logger.fine("Writing audio to postprocessor.");
audioOutputStream = this.createPostProcessingProcess(processAction).getOutputStream();
AudioSystem.write(audioInputStream, configuration.getTargetType(), audioOutputStream);
}
finally
{
if (audioOutputStream!=null)
{
// We can't do anything about any exceptions here, so we don't catch them.
TrackCutter.logger.fine("Closing audio stream.");
audioOutputStream.close();
}
}
}
else
{
TrackCutter.logger.fine("Writing audio to file.");
AudioSystem.write(audioInputStream, configuration.getTargetType(), processAction.getCutFile());
if (configuration.getDoPostProcessing())
{
TrackCutter.logger.fine("Performing postprocessing.");
this.createPostProcessingProcess(processAction);
}
}
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction,AudioInputStream)"
);
}
/**
* Create the specified post-processing process.
* @param processAction
* @return The specified post-processing process.
* @throws IOException
*/
private Process createPostProcessingProcess
( final TrackCutterProcessingAction processAction
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "performProcessAction(TrackCutterProcessingAction)"
, processAction
);
TrackCutter.logger.fine("Creating post-processing process for command: " + processAction.getPostProcessCommand());
processAction.getPostProcessFile().getParentFile().mkdirs();
Process process = Runtime.getRuntime().exec(processAction.getPostProcessCommand());
StreamPiper.pipeStream(process.getInputStream(), processAction.getStdOutRedirectFile());
StreamPiper.pipeStream(process.getErrorStream(), processAction.getErrRedirectFile());
TrackCutter.logger.exiting
(TrackCutter.class.getCanonicalName(), "performProcessAction(TrackCutterProcessingAction)", process);
return process;
}
/**
* Get the number of AudioFormat frames represented by the specified Position. Note that an AudioFormat
* frame may represent a longer or shorter time than a cue sheet frame.
* @param position
   * @param audioFormat
* @return The number of AudioFormat frames represented by the specified Position. Note that an AudioFormat
* frame may represent a longer or shorter time than a cue sheet frame.
*/
private static long getAudioFormatFrames(final Position position, final AudioFormat audioFormat)
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "getAudioFormatFrames(Position,AudioFormat)"
, new Object[] {position, audioFormat}
);
// Determine closest frame number.
long result = (long) Math.round(((double) audioFormat.getFrameRate())/75 * position.getTotalFrames());
TrackCutter.logger.exiting
( TrackCutter.class.getCanonicalName()
, "getAudioFormatFrames(Position,AudioFormat)"
, result
);
return result;
}
/**
* Skip to the specified position in the audio data.
* @param toPosition The position to skip to.
* @param audioInputStream The audio data to skip in.
* @param currentAudioFramePos The current position in frames in the audio data.
* @return The frame position in the audio data after skipping.
* @throws IOException
*/
private long skipToPosition
( Position toPosition
, AudioInputStream audioInputStream
, long currentAudioFramePos
) throws IOException
{
TrackCutter.logger.entering
( TrackCutter.class.getCanonicalName()
, "skipToPosition(Position,AudioInputStream,long)"
, new Object[] {toPosition, audioInputStream, currentAudioFramePos}
);
long toAudioFramePos = getAudioFormatFrames(toPosition, audioInputStream.getFormat());
audioInputStream.skip((toAudioFramePos - currentAudioFramePos) * audioInputStream.getFormat().getFrameSize());
TrackCutter.logger.exiting
(TrackCutter.class.getCanonicalName(), "skipToPosition(Position,AudioInputStream,long)", toAudioFramePos);
return toAudioFramePos;
}
/**
* Log an exception that was caught.
* @param exception The exception that was caught and should now be logged.
*/
private void logCaughtException(Exception exception)
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "logCaughtException(Exception)", exception);
TrackCutter.logger.severe
("Encountered an " + exception.getClass().getCanonicalName() + ": " + exception.getMessage());
StringWriter writer = new StringWriter();
exception.printStackTrace(new PrintWriter(writer));
TrackCutter.logger.fine(writer.toString());
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "logCaughtException(Exception)");
}
/**
* Get the configuration for this TrackCutter.
* @return The configuration for this TrackCutter.
*/
private TrackCutterConfiguration getConfiguration()
{
TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "getConfiguration()");
TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "getConfiguration()", this.configuration);
return this.configuration;
}
}
| Fix for Issue 18, "TrackCutter normalizes all metadata fields in a post-processing command."
Some whitespace and logging changes. | src/jwbroek/cuelib/tools/trackcutter/TrackCutter.java | Fix for Issue 18, "TrackCutter normalizes all metadata fields in a post-processing command." | <ide><path>rc/jwbroek/cuelib/tools/trackcutter/TrackCutter.java
<ide> import java.io.IOException;
<ide> import java.io.InputStream;
<ide> import java.io.OutputStream;
<del>import java.io.PrintWriter;
<del>import java.io.StringWriter;
<ide> import java.util.ArrayList;
<ide> import java.util.List;
<ide> import java.util.logging.Level;
<ide> }
<ide> catch (UnsupportedAudioFileException e)
<ide> {
<del> logCaughtException(e);
<add> TrackCutter.logger.severe
<add> ( "Encountered an " + e.getClass().getCanonicalName()
<add> + " when processing \"" + fileData.getFile() + "\": " + e.getMessage()
<add> );
<add> LogUtil.logStacktrace(TrackCutter.logger, Level.FINE, e);
<ide> }
<ide> catch (IOException e)
<ide> {
<del> logCaughtException(e);
<add> TrackCutter.logger.severe
<add> ( "Encountered an " + e.getClass().getCanonicalName()
<add> + " when processing \"" + fileData.getFile() + "\": " + e.getMessage()
<add> );
<add> LogUtil.logStacktrace(TrackCutter.logger, Level.FINE, e);
<ide> }
<ide> }
<ide> TrackCutter.logger.info("Done cutting tracks in cue sheet.");
<ide> if (trackData.getIndex(0) == null)
<ide> {
<ide> // No pregap to handle. Just process this track.
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(1).getPosition()
<add> , nextPosition
<add> , trackData
<add> , false
<add> , getConfiguration()
<add> )
<add> );
<ide> }
<ide> else
<ide> {
<ide> {
<ide> case DISCARD:
<ide> // Discard the pregap, process the track.
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(1).getPosition()
<add> , nextPosition
<add> , trackData
<add> , false
<add> , getConfiguration()
<add> )
<add> );
<ide> break;
<ide> case PREPEND:
<ide> // Prepend the pregap, if long enough.
<ide> >= this.getConfiguration().getPregapFrameLengthThreshold()
<ide> )
<ide> {
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(0).getPosition(), nextPosition, trackData, true, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(0).getPosition()
<add> , nextPosition
<add> , trackData
<add> , true
<add> , getConfiguration()
<add> )
<add> );
<ide> }
<ide> else
<ide> {
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(1).getPosition()
<add> , nextPosition
<add> , trackData
<add> , false
<add> , getConfiguration()
<add> )
<add> );
<ide> }
<ide> break;
<ide> case SEPARATE:
<ide> >= this.getConfiguration().getPregapFrameLengthThreshold()
<ide> )
<ide> {
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(0).getPosition(), trackData.getIndex(1).getPosition(), trackData, true, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(0).getPosition()
<add> , trackData.getIndex(1).getPosition()
<add> , trackData
<add> , true
<add> , getConfiguration()
<add> )
<add> );
<ide> }
<del> processActions.add(new TrackCutterProcessingAction(trackData.getIndex(1).getPosition(), nextPosition, trackData, false, getConfiguration()));
<add> processActions.add
<add> ( new TrackCutterProcessingAction
<add> ( trackData.getIndex(1).getPosition()
<add> , nextPosition
<add> , trackData
<add> , false
<add> , getConfiguration()
<add> )
<add> );
<ide> break;
<ide> }
<ide> }
<ide> , new Object[] {toPosition, audioInputStream, currentAudioFramePos}
<ide> );
<ide> long toAudioFramePos = getAudioFormatFrames(toPosition, audioInputStream.getFormat());
<del> audioInputStream.skip((toAudioFramePos - currentAudioFramePos) * audioInputStream.getFormat().getFrameSize());
<add> audioInputStream.skip((toAudioFramePos - currentAudioFramePos) * audioInputStream.getFormat().getFrameSize());
<ide> TrackCutter.logger.exiting
<ide> (TrackCutter.class.getCanonicalName(), "skipToPosition(Position,AudioInputStream,long)", toAudioFramePos);
<ide> return toAudioFramePos;
<ide> }
<ide>
<ide> /**
<del> * Log an exception that was caught.
<del> * @param exception The exception that was caught and should now be logged.
<del> */
<del> private void logCaughtException(Exception exception)
<del> {
<del> TrackCutter.logger.entering(TrackCutter.class.getCanonicalName(), "logCaughtException(Exception)", exception);
<del> TrackCutter.logger.severe
<del> ("Encountered an " + exception.getClass().getCanonicalName() + ": " + exception.getMessage());
<del> StringWriter writer = new StringWriter();
<del> exception.printStackTrace(new PrintWriter(writer));
<del> TrackCutter.logger.fine(writer.toString());
<del> TrackCutter.logger.exiting(TrackCutter.class.getCanonicalName(), "logCaughtException(Exception)");
<del> }
<del>
<del> /**
<ide> * Get the configuration for this TrackCutter.
<ide> * @return The configuration for this TrackCutter.
<ide> */ |
|
Java | mit | df1ed415d4139636c8a4c4a8b717b110c7112f51 | 0 | eduardodaluz/xfire,eduardodaluz/xfire | package org.codehaus.xfire.annotations;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
* @author <a href="mailto:[email protected]">Tomasz Sztelak</a>
*
*/
public class AnnotationsValidatorImpl implements AnnotationsValidator {
private WebAnnotations annotations;
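	/**
	 * Validates the JSR-181 class level rules: the service class may not be
	 * abstract or final, portName and endpointInterface may not be set on an
	 * interface, @HandlerChain is rejected in favour of @InHandlers, and every
	 * method carrying @WebMethod is validated in turn.
	 * @param clazz
	 * @throws AnnotationException
	 */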
protected void validateClass(Class clazz) throws AnnotationException {
int modz = clazz.getModifiers();
if (!clazz.isInterface()
&& (Modifier.isAbstract(modz) || Modifier.isFinal(modz))) {
throwException("Webservice class can't be abstract or final",
"4.1.1", "Class : " + clazz.getName());
}
WebServiceAnnotation webAnn = annotations
.getWebServiceAnnotation(clazz);
if (webAnn.getPortName() != null && webAnn.getPortName().length() > 0
&& clazz.isInterface()) {
throwException("Port name can't be defined on interface", "4.1.1",
"Class : " + clazz.getName());
}
if (webAnn.getEndpointInterface() != null
&& webAnn.getEndpointInterface().length() > 0
&& clazz.isInterface()) {
throwException("EndpointInterface can't be defined on interface",
"4.1.1", " Class : " + clazz.getName());
}
if (annotations.hasHandlerChainAnnotation(clazz)) {
throwException(
"Annotations @HandlerChain is NOT supported. Use @InHandlers instread",
"0.0", "Class : " + clazz.getName());
}
Method[] methods = clazz.getMethods();
for (int m = 0; m < methods.length; m++) {
Method method = methods[m];
if (annotations.hasWebMethodAnnotation(method)) {
validateMethod(method);
}
}
}
/**
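	 * Checks the @OneWay constraints: no return value, no declared exceptions and no INOUT/OUT parameters.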
* @param method
* @throws AnnotationException
*/
protected void validateOneWay(Method method) throws AnnotationException {
if (method.getExceptionTypes().length > 0
|| !method.getGenericReturnType().equals(void.class)) {
throwException(
"Method annotated with @OneWay annotation cannot have return value or declarated exceptions",
"4.3.1", "Method : " + method.getName());
}
for (int p = 0; p < method.getParameterTypes().length; p++) {
WebParamAnnotation param = annotations.getWebParamAnnotation(
method, p);
if (param != null
&& (param.getMode() == WebParamAnnotation.MODE_INOUT || param
.getMode() == WebParamAnnotation.MODE_OUT)) {
throwException(
"Method annotated with @OneWay annotation cannot have INOUT/OUT parameters",
"4.3.1", "Method : " + method.getName());
}
}
}
/**
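	 * Checks the @WebMethod constraints: the method must be public, one-way methods get the extra @OneWay checks, and a void method may not carry @WebResult.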
* @param method
* @throws AnnotationException
*/
protected void validateMethod(Method method) throws AnnotationException {
if (!Modifier.isPublic(method.getModifiers())) {
throwException("Method annotated with @WebMethod must be public",
"3.3", "Method : " + method.getName());
}
if (annotations.hasOnewayAnnotation(method)) {
validateOneWay(method);
}
if (void.class.equals(method.getReturnType())
&& annotations.hasWebResultAnnotation(method)) {
throwException(
"Method with result can't have @WebResult annotations",
"0.0", "Method : " + method.getName());
}
/*
* for (int p = 0; p < method.getParameterTypes().length; p++) {
* WebParamAnnotation param = annotations.getWebParamAnnotation( method,
* p); if (param != null && (param.getMode() ==
* WebParamAnnotation.MODE_INOUT || param .getMode() ==
* WebParamAnnotation.MODE_OUT) && !param.isHeader()) { throwException(
* "Parameters with mode INOUT/OUT must be marked as header=true",
* "4.4.1","Method : "+ method.getName()); } }
*/
}
public void validate(WebAnnotations annotations, Class clazz)
throws AnnotationException {
this.annotations = annotations;
validateClass(clazz);
}
private void throwException(String message, String spec, String details)
throws AnnotationException {
throw new AnnotationException(message + ". JSR181 spec : " + spec
+ ". " + details);
}
}
| xfire-annotations/src/main/org/codehaus/xfire/annotations/AnnotationsValidatorImpl.java | package org.codehaus.xfire.annotations;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
* @author <a href="mailto:[email protected]">Tomasz Sztelak</a>
*
*/
public class AnnotationsValidatorImpl implements AnnotationsValidator {
private WebAnnotations annotations;
protected void validateClass(Class clazz) throws AnnotationException {
WebServiceAnnotation webAnn = annotations
.getWebServiceAnnotation(clazz);
if (webAnn.getPortName() != null && webAnn.getPortName().length() > 0
&& clazz.isInterface()) {
throwException("Port name can't be defined on interface", "4.1.1", "Class : "+clazz.getName());
}
if (webAnn.getEndpointInterface() != null
&& webAnn.getEndpointInterface().length() > 0
&& clazz.isInterface()) {
throwException("EndpointInterface can't be defined on interface",
"4.1.1", " Class : "+clazz.getName());
}
Method[] methods = clazz.getMethods();
for (int m = 0; m < methods.length; m++) {
Method method = methods[m];
if (annotations.hasWebMethodAnnotation(method)) {
validateMethod(method);
}
}
}
protected void validateOneWay(Method method) throws AnnotationException {
if (method.getExceptionTypes().length > 0
|| !method.getGenericReturnType().equals(void.class)) {
throwException(
"Method annotated with @OneWay annotation cannot have return value or declarated exceptions",
"4.3.1","Method : "+ method.getName());
}
for (int p = 0; p < method.getParameterTypes().length; p++) {
WebParamAnnotation param = annotations.getWebParamAnnotation(
method, p);
if (param != null
&& (param.getMode() == WebParamAnnotation.MODE_INOUT || param
.getMode() == WebParamAnnotation.MODE_OUT)) {
throwException(
"Method annotated with @OneWay annotation cannot have INOUT/OUT parameters",
"4.3.1", "Method : "+ method.getName());
}
}
}
protected void validateMethod(Method method) throws AnnotationException {
if (!Modifier.isPublic(method.getModifiers())) {
throwException("Method annotated with @WebMethod must be public",
"3.3", "Method : "+ method.getName());
}
if (annotations.hasOnewayAnnotation(method)) {
validateOneWay(method);
}
/*for (int p = 0; p < method.getParameterTypes().length; p++) {
WebParamAnnotation param = annotations.getWebParamAnnotation(
method, p);
if (param != null
&& (param.getMode() == WebParamAnnotation.MODE_INOUT || param
.getMode() == WebParamAnnotation.MODE_OUT)
&& !param.isHeader()) {
throwException(
"Parameters with mode INOUT/OUT must be marked as header=true",
"4.4.1","Method : "+ method.getName());
}
}*/
}
public void validate(WebAnnotations annotations, Class clazz)
throws AnnotationException {
this.annotations = annotations;
validateClass(clazz);
}
private void throwException(String message, String spec, String details)
throws AnnotationException {
throw new AnnotationException(message + ". JSR181 spec : " + spec + ". "+ details);
}
}
| +: more validations
git-svn-id: 9326b53cbc4a8f4c3d02979b62b178127d5150fe@2175 c7d0bf07-ec0d-0410-b2cc-d48fa9be22ba
| xfire-annotations/src/main/org/codehaus/xfire/annotations/AnnotationsValidatorImpl.java | +: more validations | <ide><path>fire-annotations/src/main/org/codehaus/xfire/annotations/AnnotationsValidatorImpl.java
<ide> private WebAnnotations annotations;
<ide>
<ide> protected void validateClass(Class clazz) throws AnnotationException {
<add> int modz = clazz.getModifiers();
<add> if (!clazz.isInterface()
<add> && (Modifier.isAbstract(modz) || Modifier.isFinal(modz))) {
<add> throwException("Webservice class can't be abstract or final",
<add> "4.1.1", "Class : " + clazz.getName());
<add> }
<add>
<ide> WebServiceAnnotation webAnn = annotations
<ide> .getWebServiceAnnotation(clazz);
<ide> if (webAnn.getPortName() != null && webAnn.getPortName().length() > 0
<ide> && clazz.isInterface()) {
<del> throwException("Port name can't be defined on interface", "4.1.1", "Class : "+clazz.getName());
<add> throwException("Port name can't be defined on interface", "4.1.1",
<add> "Class : " + clazz.getName());
<ide> }
<ide>
<ide> if (webAnn.getEndpointInterface() != null
<ide> && webAnn.getEndpointInterface().length() > 0
<ide> && clazz.isInterface()) {
<ide> throwException("EndpointInterface can't be defined on interface",
<del> "4.1.1", " Class : "+clazz.getName());
<add> "4.1.1", " Class : " + clazz.getName());
<add> }
<add>
<add> if (annotations.hasHandlerChainAnnotation(clazz)) {
<add> throwException(
<add>					"Annotations @HandlerChain is NOT supported. Use @InHandlers instead",
<add> "0.0", "Class : " + clazz.getName());
<add>
<ide> }
<ide>
<ide> Method[] methods = clazz.getMethods();
<ide>
<ide> }
<ide>
<add> /**
<add> * @param method
<add> * @throws AnnotationException
<add> */
<ide> protected void validateOneWay(Method method) throws AnnotationException {
<ide>
<ide> if (method.getExceptionTypes().length > 0
<ide> || !method.getGenericReturnType().equals(void.class)) {
<ide> throwException(
<ide>					"Method annotated with @OneWay annotation cannot have return value or declared exceptions",
<del> "4.3.1","Method : "+ method.getName());
<add> "4.3.1", "Method : " + method.getName());
<ide> }
<ide> for (int p = 0; p < method.getParameterTypes().length; p++) {
<ide> WebParamAnnotation param = annotations.getWebParamAnnotation(
<ide> .getMode() == WebParamAnnotation.MODE_OUT)) {
<ide> throwException(
<ide> "Method annotated with @OneWay annotation cannot have INOUT/OUT parameters",
<del> "4.3.1", "Method : "+ method.getName());
<add> "4.3.1", "Method : " + method.getName());
<ide> }
<ide> }
<ide>
<ide> }
<ide>
<add> /**
<add> * @param method
<add> * @throws AnnotationException
<add> */
<ide> protected void validateMethod(Method method) throws AnnotationException {
<ide>
<ide> if (!Modifier.isPublic(method.getModifiers())) {
<ide> throwException("Method annotated with @WebMethod must be public",
<del> "3.3", "Method : "+ method.getName());
<add> "3.3", "Method : " + method.getName());
<ide> }
<ide>
<ide> if (annotations.hasOnewayAnnotation(method)) {
<ide> validateOneWay(method);
<ide> }
<ide>
<del> /*for (int p = 0; p < method.getParameterTypes().length; p++) {
<del> WebParamAnnotation param = annotations.getWebParamAnnotation(
<del> method, p);
<del> if (param != null
<del> && (param.getMode() == WebParamAnnotation.MODE_INOUT || param
<del> .getMode() == WebParamAnnotation.MODE_OUT)
<del> && !param.isHeader()) {
<del> throwException(
<del> "Parameters with mode INOUT/OUT must be marked as header=true",
<del> "4.4.1","Method : "+ method.getName());
<del> }
<del> }*/
<add> if (void.class.equals(method.getReturnType())
<add> && annotations.hasWebResultAnnotation(method)) {
<add> throwException(
<add>					"Method with void return type can't have @WebResult annotation",
<add> "0.0", "Method : " + method.getName());
<add> }
<add> /*
<add> * for (int p = 0; p < method.getParameterTypes().length; p++) {
<add> * WebParamAnnotation param = annotations.getWebParamAnnotation( method,
<add> * p); if (param != null && (param.getMode() ==
<add> * WebParamAnnotation.MODE_INOUT || param .getMode() ==
<add> * WebParamAnnotation.MODE_OUT) && !param.isHeader()) { throwException(
<add> * "Parameters with mode INOUT/OUT must be marked as header=true",
<add> * "4.4.1","Method : "+ method.getName()); } }
<add> */
<ide>
<ide> }
<ide>
<ide>
<ide> private void throwException(String message, String spec, String details)
<ide> throws AnnotationException {
<del> throw new AnnotationException(message + ". JSR181 spec : " + spec + ". "+ details);
<add> throw new AnnotationException(message + ". JSR181 spec : " + spec
<add> + ". " + details);
<ide> }
<ide> } |
|
Java | apache-2.0 | error: pathspec 'FootballScores/app/src/main/java/barqsoft/footballscores/logger/Debug.java' did not match any file(s) known to git
| b699106035aeb769ad14111b6aaf5f1ea5774c07 | 1 | thecodegame/udacity-p3,thecodegame/udacity-p3 | package barqsoft.footballscores.logger;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
/**
* Custom utility class for debugging & logging purpose
* Created by Amrendra Kumar on 27/01/16.
*/
public class Debug {
private static final Boolean DEBUG = true;
private static final String TAG = "football";
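    // Builds a prefix of the form "Class::method() [line] : msg" identifying the original caller;
    // stack element 4 is expected to be the caller, past getStackTrace(), this helper and the
    // public log method that invoked it.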
private static String getMsg(String msg) {
if (DEBUG) {
StackTraceElement strace = Thread.currentThread().getStackTrace()[4];
// String fileName = strace.getFileName();
String className = strace.getClassName();
String methodName = strace.getMethodName();
int line = strace.getLineNumber();
msg = className + "::" + methodName + "() [" + line + "]" + " : " + msg;
}
return msg;
}
public static void c() {
if (DEBUG) {
Log.e(TAG, getMsg("CHECK"));
}
}
public static void e(String msg, Boolean show) {
if (DEBUG || show) {
Log.e(TAG, getMsg(msg));
}
}
public static void d(String msg, Boolean show) {
if (DEBUG || show) {
Log.d(TAG, getMsg(msg));
}
}
public static void i(String msg, Boolean show) {
if (DEBUG || show) {
Log.i(TAG, getMsg(msg));
}
}
public static void w(String msg, Boolean show) {
if (DEBUG || show) {
Log.w(TAG, getMsg(msg));
}
}
public static void v(String msg, Boolean show) {
if (DEBUG || show) {
Log.v(TAG, getMsg(msg));
}
}
public static void showSnackbarLong(View view, String str) {
Snackbar.make(view, str, Snackbar.LENGTH_LONG).show();
}
public static void showSnackbarShort(View view, String str) {
Snackbar.make(view, str, Snackbar.LENGTH_SHORT).show();
}
public static void showSnackbar(View view, String str, int len) {
Snackbar.make(view, str, len).show();
}
public static void showToastShort(String msg, Context context) {
showToastShort(msg, context, false);
}
public static void showToastLong(String msg, Context context) {
showToastLong(msg, context, false);
}
public static void showToastShort(String msg, Context context, Boolean show) {
if (DEBUG || show) {
if (context == null) {
Log.e(TAG, getMsg("Context is null. Trying to show toast. Msg: " + msg));
} else {
Toast.makeText(context, msg, Toast.LENGTH_SHORT).show();
}
}
}
public static void showToastLong(String msg, Context context, Boolean show) {
if (DEBUG || show) {
if (context == null) {
Log.e(TAG, getMsg("Context is null. Trying to show toast. Msg: " + msg));
} else {
Toast.makeText(context, msg, Toast.LENGTH_LONG).show();
}
}
}
public static void array(Object[] arr) {
if (DEBUG) {
Log.i(TAG, getMsg(Arrays.deepToString(arr)));
}
}
public static void object(Object obj, String desc) {
if (DEBUG) {
if (obj == null) {
Log.e(TAG, getMsg("Error : " + desc + " is NULL"));
} else {
Log.e(TAG, getMsg(obj.toString()));
}
}
}
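    // Formats a single key/value pair as "key = value (SimpleTypeName)" plus a newline,
    // or "key = <null>" when the value is null.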
private static String debugObject(String key, Object obj) {
if (obj == null) {
return String.format("%s = <null>%n", key);
}
return String.format("%s = %s (%s)%n", key, String.valueOf(obj), obj.getClass()
.getSimpleName());
}
public static void bundle(Bundle bundle) {
if (DEBUG) {
if (bundle != null) {
StringBuilder sb = new StringBuilder();
sb.append("Bundle{\n");
for (String key : bundle.keySet()) {
sb.append(debugObject(key, bundle.get(key)));
}
sb.append("}");
Log.d(TAG, getMsg(sb.toString()));
} else {
Log.e(TAG, getMsg("Error : Expected Bundle is null"));
}
}
}
public static void intent(Intent intent) {
if (DEBUG) {
if (intent != null) {
bundle(intent.getExtras());
} else {
Log.d(TAG, getMsg("Error : Expected Intent is null"));
}
}
}
public static void preferences(SharedPreferences prefs) {
if (DEBUG) {
if (prefs != null) {
StringBuilder sb = new StringBuilder();
sb.append("SharedPreferences{\n");
for (Map.Entry<String, ?> entry : prefs.getAll().entrySet()) {
sb.append(debugObject(entry.getKey(), entry.getValue()));
}
sb.append("}");
Log.d(TAG, getMsg(sb.toString()));
} else {
Log.e(TAG, getMsg("Error : Expected SharedPreferences is null"));
}
}
}
public static void JSONObject(JSONObject jsonObject) {
if (DEBUG) {
if (jsonObject != null) {
StringBuilder sb = new StringBuilder();
sb.append("JsonObject{\n");
Iterator<String> s = jsonObject.keys();
while (s.hasNext()) {
String key = s.next();
try {
sb.append(debugObject(key, jsonObject.get(key)));
} catch (JSONException e) {
e.printStackTrace();
}
}
sb.append("}");
Log.i(TAG, getMsg(sb.toString()));
} else {
Log.e(TAG, getMsg("Error : Expected JSONObject is null"));
}
}
}
}
| FootballScores/app/src/main/java/barqsoft/footballscores/logger/Debug.java | Add debug logger class
| FootballScores/app/src/main/java/barqsoft/footballscores/logger/Debug.java | Add debug logger class | <ide><path>ootballScores/app/src/main/java/barqsoft/footballscores/logger/Debug.java
<add>package barqsoft.footballscores.logger;
<add>
<add>import android.content.Context;
<add>import android.content.Intent;
<add>import android.content.SharedPreferences;
<add>import android.os.Bundle;
<add>import android.support.design.widget.Snackbar;
<add>import android.util.Log;
<add>import android.view.View;
<add>import android.widget.Toast;
<add>
<add>import org.json.JSONException;
<add>import org.json.JSONObject;
<add>
<add>import java.util.Arrays;
<add>import java.util.Iterator;
<add>import java.util.Map;
<add>
<add>/**
<add> * Custom utility class for debugging & logging purpose
<add> * Created by Amrendra Kumar on 27/01/16.
<add> */
<add>public class Debug {
<add> private static final Boolean DEBUG = true;
<add> private static final String TAG = "football";
<add>
<add> private static String getMsg(String msg) {
<add> if (DEBUG) {
<add> StackTraceElement strace = Thread.currentThread().getStackTrace()[4];
<add> // String fileName = strace.getFileName();
<add> String className = strace.getClassName();
<add> String methodName = strace.getMethodName();
<add> int line = strace.getLineNumber();
<add> msg = className + "::" + methodName + "() [" + line + "]" + " : " + msg;
<add> }
<add> return msg;
<add> }
<add>
<add> public static void c() {
<add> if (DEBUG) {
<add> Log.e(TAG, getMsg("CHECK"));
<add> }
<add> }
<add>
<add> public static void e(String msg, Boolean show) {
<add> if (DEBUG || show) {
<add> Log.e(TAG, getMsg(msg));
<add> }
<add> }
<add>
<add> public static void d(String msg, Boolean show) {
<add> if (DEBUG || show) {
<add> Log.d(TAG, getMsg(msg));
<add> }
<add> }
<add>
<add> public static void i(String msg, Boolean show) {
<add> if (DEBUG || show) {
<add> Log.i(TAG, getMsg(msg));
<add> }
<add> }
<add>
<add> public static void w(String msg, Boolean show) {
<add> if (DEBUG || show) {
<add> Log.w(TAG, getMsg(msg));
<add> }
<add> }
<add>
<add> public static void v(String msg, Boolean show) {
<add> if (DEBUG || show) {
<add> Log.v(TAG, getMsg(msg));
<add> }
<add> }
<add>
<add> public static void showSnackbarLong(View view, String str) {
<add> Snackbar.make(view, str, Snackbar.LENGTH_LONG).show();
<add> }
<add>
<add> public static void showSnackbarShort(View view, String str) {
<add> Snackbar.make(view, str, Snackbar.LENGTH_SHORT).show();
<add> }
<add>
<add> public static void showSnackbar(View view, String str, int len) {
<add> Snackbar.make(view, str, len).show();
<add> }
<add>
<add> public static void showToastShort(String msg, Context context) {
<add> showToastShort(msg, context, false);
<add> }
<add>
<add> public static void showToastLong(String msg, Context context) {
<add> showToastLong(msg, context, false);
<add> }
<add>
<add> public static void showToastShort(String msg, Context context, Boolean show) {
<add> if (DEBUG || show) {
<add> if (context == null) {
<add> Log.e(TAG, getMsg("Context is null. Trying to show toast. Msg: " + msg));
<add> } else {
<add> Toast.makeText(context, msg, Toast.LENGTH_SHORT).show();
<add> }
<add> }
<add> }
<add>
<add> public static void showToastLong(String msg, Context context, Boolean show) {
<add> if (DEBUG || show) {
<add> if (context == null) {
<add> Log.e(TAG, getMsg("Context is null. Trying to show toast. Msg: " + msg));
<add> } else {
<add> Toast.makeText(context, msg, Toast.LENGTH_LONG).show();
<add> }
<add> }
<add> }
<add>
<add> public static void array(Object[] arr) {
<add> if (DEBUG) {
<add> Log.i(TAG, getMsg(Arrays.deepToString(arr)));
<add> }
<add> }
<add>
<add> public static void object(Object obj, String desc) {
<add> if (DEBUG) {
<add> if (obj == null) {
<add> Log.e(TAG, getMsg("Error : " + desc + " is NULL"));
<add> } else {
<add> Log.e(TAG, getMsg(obj.toString()));
<add> }
<add> }
<add> }
<add>
<add> private static String debugObject(String key, Object obj) {
<add> if (obj == null) {
<add> return String.format("%s = <null>%n", key);
<add> }
<add> return String.format("%s = %s (%s)%n", key, String.valueOf(obj), obj.getClass()
<add> .getSimpleName());
<add> }
<add>
<add> public static void bundle(Bundle bundle) {
<add> if (DEBUG) {
<add> if (bundle != null) {
<add> StringBuilder sb = new StringBuilder();
<add> sb.append("Bundle{\n");
<add> for (String key : bundle.keySet()) {
<add> sb.append(debugObject(key, bundle.get(key)));
<add> }
<add> sb.append("}");
<add> Log.d(TAG, getMsg(sb.toString()));
<add> } else {
<add> Log.e(TAG, getMsg("Error : Expected Bundle is null"));
<add> }
<add> }
<add> }
<add>
<add> public static void intent(Intent intent) {
<add> if (DEBUG) {
<add> if (intent != null) {
<add> bundle(intent.getExtras());
<add> } else {
<add> Log.d(TAG, getMsg("Error : Expected Intent is null"));
<add> }
<add> }
<add> }
<add>
<add> public static void preferences(SharedPreferences prefs) {
<add> if (DEBUG) {
<add> if (prefs != null) {
<add> StringBuilder sb = new StringBuilder();
<add> sb.append("SharedPreferences{\n");
<add> for (Map.Entry<String, ?> entry : prefs.getAll().entrySet()) {
<add> sb.append(debugObject(entry.getKey(), entry.getValue()));
<add> }
<add> sb.append("}");
<add> Log.d(TAG, getMsg(sb.toString()));
<add> } else {
<add> Log.e(TAG, getMsg("Error : Expected SharedPreferences is null"));
<add> }
<add> }
<add> }
<add>
<add> public static void JSONObject(JSONObject jsonObject) {
<add> if (DEBUG) {
<add> if (jsonObject != null) {
<add> StringBuilder sb = new StringBuilder();
<add> sb.append("JsonObject{\n");
<add> Iterator<String> s = jsonObject.keys();
<add> while (s.hasNext()) {
<add> String key = s.next();
<add> try {
<add> sb.append(debugObject(key, jsonObject.get(key)));
<add> } catch (JSONException e) {
<add> e.printStackTrace();
<add> }
<add> }
<add> sb.append("}");
<add> Log.i(TAG, getMsg(sb.toString()));
<add> } else {
<add> Log.e(TAG, getMsg("Error : Expected JSONObject is null"));
<add> }
<add> }
<add> }
<add>} |
|
Java | mit | 3a004fcfd16deee8fd726f7007f39cc302e5b0c1 | 0 | rayram23/webcrawler,rayram23/webcrawler,rayram23/webcrawler | package com.rayram23.webcrawler;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.rayram23.webcrawler.domain.Page;
import com.rayram23.webcrawler.fetch.PageFetchListener;
import com.rayram23.webcrawler.fetch.PageFetcherPool;
import com.rayram23.webcrawler.parser.UriParser;
import com.rayram23.webcrawler.sitemap.SiteMap;
public class WebCrawler implements PageFetchListener{
private SiteMap siteMap;
private URI uri;
private Logger logger = Logger.getLogger("WebCrawler");
private UriParser uriParser;
private PageFetcherPool pool;
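	// Links that have already been queued for fetching; checked so the same URL is not crawled twice.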
private Set<String> seen;
public WebCrawler(URI uri, SiteMap siteMap, UriParser uriParser, PageFetcherPool pool){
if(siteMap == null){
throw new IllegalArgumentException("Please pass a valid SiteMap object.");
}
this.siteMap = siteMap;
this.uriParser = uriParser;
this.pool = pool;
this.uri = uri;
this.seen = new HashSet<String>();
}
public void start() throws MalformedURLException{
String startLink = this.uri.toURL().toString();
this.seen.add(startLink);
this.pool.fetchPage(new Page(startLink));
}
public void pageFetched(Page page, Set<String> links, Set<String> images, Set<String> statics) {
//add the page to the graph
logger.log(Level.INFO, "Page crawled: "+page.getUrl());
this.siteMap.addPage(page);
for(String link : links){
//if the link is "seen" then some other page had a link to it.
			//but it may not have been explored yet; we don't need to add
//this link to the queue since it may already be there...
			//possible race condition (if one thread is adding the link while the other is checking
//that the link exists in the seen set)
boolean seen = this.seen.contains(link);
if(seen){
continue;
}
URI other = this.convertToURI(link);
if(other == null){
//logger.log(Level.WARNING, "Link is not a URI: "+link);
continue;
}
boolean onDomain = this.uriParser.isUrlOnDomain(this.uri, other);
if(!onDomain){
//logger.log(Level.INFO, "Link is not on domain: "+link);
continue;
}
this.seen.add(link);
this.pool.fetchPage(new Page(link));
}
}
protected URI convertToURI(String link){
URI out = null;
try{
out = new URI(link);
}
catch(URISyntaxException e){
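			// Malformed link: ignore the exception and fall through, returning null so the caller can skip it.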
}
return out;
}
}
| src/main/java/com/rayram23/webcrawler/WebCrawler.java | package com.rayram23.webcrawler;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.rayram23.webcrawler.domain.Page;
import com.rayram23.webcrawler.fetch.PageFetchListener;
import com.rayram23.webcrawler.fetch.PageFetcherPool;
import com.rayram23.webcrawler.parser.UriParser;
import com.rayram23.webcrawler.sitemap.SiteMap;
public class WebCrawler implements PageFetchListener{
private SiteMap siteMap;
private URI uri;
private Logger logger = Logger.getLogger("WebCrawler");
private UriParser uriParser;
private PageFetcherPool pool;
private Set<String> seen;
public WebCrawler(URI uri, SiteMap siteMap, UriParser uriParser, PageFetcherPool pool){
if(siteMap == null){
throw new IllegalArgumentException("Please pass a valid SiteMap object.");
}
this.siteMap = siteMap;
this.uriParser = uriParser;
this.pool = pool;
this.uri = uri;
this.seen = new HashSet<String>();
}
public void start() throws MalformedURLException{
String startLink = this.uri.toURL().toString();
this.seen.add(startLink);
this.pool.fetchPage(new Page(startLink));
}
public void pageFetched(Page page, Set<String> links, Set<String> images, Set<String> statics) {
//add the page to the graph
logger.log(Level.INFO, "Page crawled: "+page.getUrl());
this.siteMap.addPage(page);
for(String link : links){
//if the link is "seen" then some other page had a link to it
//but i may have not been explotred yet we dont need to add
//this link to the queue sine it may already be there...
//possible race condition (if one thead is adding the link while to other is checking
//that the link exists in the seen set)
boolean seen = this.seen.contains(link);
if(seen){
continue;
}
URI other = this.convertToURI(link);
if(other == null){
//logger.log(Level.WARNING, "Link is not a URI: "+link);
continue;
}
boolean onDomain = this.uriParser.isUrlOnDomain(this.uri, other);
if(!onDomain){
//logger.log(Level.INFO, "Link is not on domain: "+link);
continue;
}
this.seen.add(link);
this.pool.fetchPage(new Page(link));
}
}
protected URI convertToURI(String link){
URI out = null;
try{
out = new URI(link);
}
catch(URISyntaxException e){
}
return out;
}
}
| fixed some comments
| src/main/java/com/rayram23/webcrawler/WebCrawler.java | fixed some comments | <ide><path>rc/main/java/com/rayram23/webcrawler/WebCrawler.java
<ide>
<ide> this.siteMap.addPage(page);
<ide> for(String link : links){
<del> //if the link is "seen" then some other page had a link to it
<del> //but i may have not been explotred yet we dont need to add
<del> //this link to the queue sine it may already be there...
<add> //if the link is "seen" then some other page had a link to it.
<add> //but it may have not been explotred yet we dont need to add
<add> //this link to the queue since it may already be there...
<ide> //possible race condition (if one thead is adding the link while to other is checking
<ide> //that the link exists in the seen set)
<ide> boolean seen = this.seen.contains(link); |
|
Java | apache-2.0 | 3d875f22e05c9549ba0e417d350f5b49cc747681 | 0 | cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.catalog;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hive.hbase.HBaseSerDe;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.log4j.Logger;
import org.apache.impala.common.Pair;
import org.apache.impala.thrift.TCatalogObjectType;
import org.apache.impala.thrift.TColumn;
import org.apache.impala.thrift.THBaseTable;
import org.apache.impala.thrift.TResultSet;
import org.apache.impala.thrift.TResultSetMetadata;
import org.apache.impala.thrift.TTable;
import org.apache.impala.thrift.TTableDescriptor;
import org.apache.impala.thrift.TTableType;
import org.apache.impala.util.StatsHelper;
import org.apache.impala.util.TResultRowBuilder;
import com.codahale.metrics.Timer;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* Impala representation of HBase table metadata,
* as loaded from Hive's metastore.
* This implies that we inherit the metastore's limitations related to HBase,
* for example the lack of support for composite HBase row keys.
* We sort the HBase columns (cols) by family/qualifier
* to simplify the retrieval logic in the backend, since
* HBase returns data ordered by family/qualifier.
* This implies that a "select *"-query on an HBase table
* will not have the columns ordered as they were declared in the DDL.
* They will be ordered by family/qualifier.
*
*/
public class HBaseTable extends Table {
// Maximum deviation from the average to stop querying more regions
// to estimate the row count
private static final double DELTA_FROM_AVERAGE = 0.15;
private static final Logger LOG = Logger.getLogger(HBaseTable.class);
// Copied from Hive's HBaseStorageHandler.java.
public static final String DEFAULT_PREFIX = "default.";
// Number of rows fetched during the row count estimation per region
public static final int ROW_COUNT_ESTIMATE_BATCH_SIZE = 10;
// Minimum number of regions that are checked to estimate the row count
private static final int MIN_NUM_REGIONS_TO_CHECK = 5;
// Name of table in HBase.
// 'this.name' is the alias of the HBase table in Hive.
protected String hbaseTableName_;
// Input format class for HBase tables read by Hive.
private static final String HBASE_INPUT_FORMAT =
"org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat";
// Serialization class for HBase tables set in the corresponding Metastore table.
private static final String HBASE_SERIALIZATION_LIB =
"org.apache.hadoop.hive.hbase.HBaseSerDe";
// Storage handler class for HBase tables read by Hive.
private static final String HBASE_STORAGE_HANDLER =
"org.apache.hadoop.hive.hbase.HBaseStorageHandler";
// Column family of HBase row key
private static final String ROW_KEY_COLUMN_FAMILY = ":key";
// Keep the conf around
private final static Configuration hbaseConf_ = HBaseConfiguration.create();
// Cached column families. Used primarily for speeding up row stats estimation
// (see IMPALA-4211).
private HColumnDescriptor[] columnFamilies_ = null;
protected HBaseTable(org.apache.hadoop.hive.metastore.api.Table msTbl,
Db db, String name, String owner) {
super(msTbl, db, name, owner);
}
/**
* Connection instances are expensive to create. The HBase documentation recommends
* one and then sharing it among threads. All operations on a connection are
* thread-safe.
*/
private static class ConnectionHolder {
private static Connection connection_ = null;
public static synchronized Connection getConnection(Configuration conf)
throws IOException {
if (connection_ == null || connection_.isClosed()) {
connection_ = ConnectionFactory.createConnection(conf);
}
return connection_;
}
}
/**
* Table client objects are thread-unsafe and cheap to create. The HBase docs recommend
* creating a new one for each task and then closing when done.
*/
public org.apache.hadoop.hbase.client.Table getHBaseTable() throws IOException {
return ConnectionHolder.getConnection(hbaseConf_)
.getTable(TableName.valueOf(hbaseTableName_));
}
private void closeHBaseTable(org.apache.hadoop.hbase.client.Table table) {
try {
table.close();
} catch (IOException e) {
LOG.error("Error closing HBase table: " + hbaseTableName_, e);
}
}
/**
* Get the cluster status, making sure we close the admin client afterwards.
*/
public ClusterStatus getClusterStatus() throws IOException {
Admin admin = null;
ClusterStatus clusterStatus = null;
try {
Connection connection = ConnectionHolder.getConnection(hbaseConf_);
admin = connection.getAdmin();
clusterStatus = admin.getClusterStatus();
} finally {
if (admin != null) admin.close();
}
return clusterStatus;
}
/**
* Parse the column description string to the column families and column
   * qualifiers. This is a copy of HBaseSerDe.parseColumnMapping and
   * parseColumnStorageTypes with parts we don't use removed. The Hive functions
* are not public.
   * tableDefaultStorageIsBinary - true if the table defaults to binary encoding
* columnsMappingSpec - input string format describing the table
* fieldSchemas - input field schema from metastore table
* columnFamilies/columnQualifiers/columnBinaryEncodings - out parameters that will be
* filled with the column family, column qualifier and encoding for each column.
*/
private void parseColumnMapping(boolean tableDefaultStorageIsBinary,
String columnsMappingSpec, List<FieldSchema> fieldSchemas,
List<String> columnFamilies, List<String> columnQualifiers,
List<Boolean> colIsBinaryEncoded) throws SerDeException {
if (columnsMappingSpec == null) {
throw new SerDeException(
"Error: hbase.columns.mapping missing for this HBase table.");
}
if (columnsMappingSpec.equals("") ||
columnsMappingSpec.equals(HBaseSerDe.HBASE_KEY_COL)) {
throw new SerDeException("Error: hbase.columns.mapping specifies only "
+ "the HBase table row key. A valid Hive-HBase table must specify at "
+ "least one additional column.");
}
int rowKeyIndex = -1;
String[] columnSpecs = columnsMappingSpec.split(",");
// If there was an implicit key column mapping, the number of columns (fieldSchemas)
// will be one more than the number of column mapping specs.
int fsStartIdxOffset = fieldSchemas.size() - columnSpecs.length;
if (fsStartIdxOffset != 0 && fsStartIdxOffset != 1) {
// This should never happen - Hive blocks creating a mismatched table and both Hive
// and Impala currently block all column-level DDL on HBase tables.
throw new SerDeException(String.format("Number of entries in " +
"'hbase.columns.mapping' does not match the number of columns in the " +
"table: %d != %d (counting the key if implicit)",
columnSpecs.length, fieldSchemas.size()));
}
for (int i = 0; i < columnSpecs.length; ++i) {
String mappingSpec = columnSpecs[i];
String[] mapInfo = mappingSpec.split("#");
// Trim column info so that serdeproperties with new lines still parse correctly.
String colInfo = mapInfo[0].trim();
int idxFirst = colInfo.indexOf(":");
int idxLast = colInfo.lastIndexOf(":");
if (idxFirst < 0 || !(idxFirst == idxLast)) {
throw new SerDeException("Error: the HBase columns mapping contains a "
+ "badly formed column family, column qualifier specification.");
}
if (colInfo.equals(HBaseSerDe.HBASE_KEY_COL)) {
Preconditions.checkState(fsStartIdxOffset == 0);
rowKeyIndex = i;
columnFamilies.add(colInfo);
columnQualifiers.add(null);
} else {
String[] parts = colInfo.split(":");
Preconditions.checkState(parts.length > 0 && parts.length <= 2);
columnFamilies.add(parts[0]);
if (parts.length == 2) {
columnQualifiers.add(parts[1]);
} else {
columnQualifiers.add(null);
}
}
// Set column binary encoding
FieldSchema fieldSchema = fieldSchemas.get(i + fsStartIdxOffset);
boolean supportsBinaryEncoding = supportsBinaryEncoding(fieldSchema);
if (mapInfo.length == 1) {
// There is no column level storage specification. Use the table storage spec.
colIsBinaryEncoded.add(
new Boolean(tableDefaultStorageIsBinary && supportsBinaryEncoding));
} else if (mapInfo.length == 2) {
// There is a storage specification for the column
String storageOption = mapInfo[1];
if (!(storageOption.equals("-") || "string".startsWith(storageOption) || "binary"
.startsWith(storageOption))) {
throw new SerDeException("Error: A column storage specification is one of"
+ " the following: '-', a prefix of 'string', or a prefix of 'binary'. "
+ storageOption + " is not a valid storage option specification for "
+ fieldSchema.getName());
}
boolean isBinaryEncoded = false;
if ("-".equals(storageOption)) {
isBinaryEncoded = tableDefaultStorageIsBinary;
} else if ("binary".startsWith(storageOption)) {
isBinaryEncoded = true;
}
if (isBinaryEncoded && !supportsBinaryEncoding) {
// Use string encoding and log a warning if the column spec is binary but the
// column type does not support it.
// TODO: Hive/HBase does not raise an exception, but should we?
LOG.warn("Column storage specification for column " + fieldSchema.getName()
+ " is binary" + " but the column type " + fieldSchema.getType() +
" does not support binary encoding. Fallback to string format.");
isBinaryEncoded = false;
}
colIsBinaryEncoded.add(isBinaryEncoded);
} else {
// error in storage specification
throw new SerDeException("Error: " + HBaseSerDe.HBASE_COLUMNS_MAPPING
+ " storage specification " + mappingSpec + " is not valid for column: "
+ fieldSchema.getName());
}
}
if (rowKeyIndex == -1) {
columnFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
columnQualifiers.add(0, null);
colIsBinaryEncoded.add(0,
supportsBinaryEncoding(fieldSchemas.get(0)) && tableDefaultStorageIsBinary);
}
}
private boolean supportsBinaryEncoding(FieldSchema fs) {
try {
Type colType = parseColumnType(fs);
// Only boolean, integer and floating point types can use binary storage.
return colType.isBoolean() || colType.isIntegerType()
|| colType.isFloatingPointType();
} catch (TableLoadingException e) {
return false;
}
}
@Override
/**
* For hbase tables, we can support tables with columns we don't understand at
* all (e.g. map) as long as the user does not select those. This is in contrast
* to hdfs tables since we typically need to understand all columns to make sense
* of the file at all.
*/
public void load(boolean reuseMetadata, IMetaStoreClient client,
org.apache.hadoop.hive.metastore.api.Table msTbl) throws TableLoadingException {
Preconditions.checkNotNull(getMetaStoreTable());
final Timer.Context context =
getMetrics().getTimer(Table.LOAD_DURATION_METRIC).time();
try {
msTable_ = msTbl;
hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
// Warm up the connection and verify the table exists.
getHBaseTable().close();
columnFamilies_ = null;
Map<String, String> serdeParams =
getMetaStoreTable().getSd().getSerdeInfo().getParameters();
String hbaseColumnsMapping = serdeParams.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
if (hbaseColumnsMapping == null) {
throw new MetaException("No hbase.columns.mapping defined in Serde.");
}
String hbaseTableDefaultStorageType = getMetaStoreTable().getParameters().get(
HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE);
boolean tableDefaultStorageIsBinary = false;
if (hbaseTableDefaultStorageType != null &&
!hbaseTableDefaultStorageType.isEmpty()) {
if (hbaseTableDefaultStorageType.equalsIgnoreCase("binary")) {
tableDefaultStorageIsBinary = true;
} else if (!hbaseTableDefaultStorageType.equalsIgnoreCase("string")) {
throw new SerDeException("Error: " +
HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE +
" parameter must be specified as" +
" 'string' or 'binary'; '" + hbaseTableDefaultStorageType +
"' is not a valid specification for this table/serde property.");
}
}
// Parse HBase column-mapping string.
List<FieldSchema> fieldSchemas = getMetaStoreTable().getSd().getCols();
List<String> hbaseColumnFamilies = new ArrayList<String>();
List<String> hbaseColumnQualifiers = new ArrayList<String>();
List<Boolean> hbaseColumnBinaryEncodings = new ArrayList<Boolean>();
parseColumnMapping(tableDefaultStorageIsBinary, hbaseColumnsMapping, fieldSchemas,
hbaseColumnFamilies, hbaseColumnQualifiers, hbaseColumnBinaryEncodings);
Preconditions.checkState(
hbaseColumnFamilies.size() == hbaseColumnQualifiers.size());
Preconditions.checkState(fieldSchemas.size() == hbaseColumnFamilies.size());
// Populate tmp cols in the order they appear in the Hive metastore.
// We will reorder the cols below.
List<HBaseColumn> tmpCols = Lists.newArrayList();
// Store the key column separately.
// TODO: Change this to an ArrayList once we support composite row keys.
HBaseColumn keyCol = null;
for (int i = 0; i < fieldSchemas.size(); ++i) {
FieldSchema s = fieldSchemas.get(i);
Type t = Type.INVALID;
try {
t = parseColumnType(s);
} catch (TableLoadingException e) {
// Ignore hbase types we don't support yet. We can load the metadata
// but won't be able to select from it.
}
HBaseColumn col = new HBaseColumn(s.getName(), hbaseColumnFamilies.get(i),
hbaseColumnQualifiers.get(i), hbaseColumnBinaryEncodings.get(i),
t, s.getComment(), -1);
if (col.getColumnFamily().equals(ROW_KEY_COLUMN_FAMILY)) {
// Store the row key column separately from the rest
keyCol = col;
} else {
tmpCols.add(col);
}
}
Preconditions.checkState(keyCol != null);
// The backend assumes that the row key column is always first and
// that the remaining HBase columns are ordered by columnFamily,columnQualifier,
// so the final position depends on the other mapped HBase columns.
// Sort columns and update positions.
Collections.sort(tmpCols);
clearColumns();
keyCol.setPosition(0);
addColumn(keyCol);
// Update the positions of the remaining columns
for (int i = 0; i < tmpCols.size(); ++i) {
HBaseColumn col = tmpCols.get(i);
col.setPosition(i + 1);
addColumn(col);
}
// Set table stats.
setTableStats(msTable_);
// since we don't support composite hbase rowkeys yet, all hbase tables have a
// single clustering col
numClusteringCols_ = 1;
loadAllColumnStats(client);
} catch (Exception e) {
throw new TableLoadingException("Failed to load metadata for HBase table: " +
name_, e);
} finally {
context.stop();
}
}
@Override
protected void loadFromThrift(TTable table) throws TableLoadingException {
super.loadFromThrift(table);
try {
hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
// Warm up the connection and verify the table exists.
getHBaseTable().close();
columnFamilies_ = null;
} catch (Exception e) {
throw new TableLoadingException("Failed to load metadata for HBase table from " +
"thrift table: " + name_, e);
}
}
/**
* This method is completely copied from Hive's HBaseStorageHandler.java.
*/
private String getHBaseTableName(org.apache.hadoop.hive.metastore.api.Table tbl) {
// Give preference to TBLPROPERTIES over SERDEPROPERTIES
// (really we should only use TBLPROPERTIES, so this is just
// for backwards compatibility with the original specs).
String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
if (tableName == null) {
tableName = tbl.getSd().getSerdeInfo().getParameters().get(
HBaseSerDe.HBASE_TABLE_NAME);
}
if (tableName == null) {
tableName = tbl.getDbName() + "." + tbl.getTableName();
if (tableName.startsWith(DEFAULT_PREFIX)) {
tableName = tableName.substring(DEFAULT_PREFIX.length());
}
}
return tableName;
}
/**
* Estimates the number of rows for a single region and returns a pair with
* the estimated row count and the estimated size in bytes per row.
*/
private Pair<Long, Long> getEstimatedRowStatsForRegion(HRegionLocation location,
boolean isCompressed, ClusterStatus clusterStatus) throws IOException {
HRegionInfo info = location.getRegionInfo();
Scan s = new Scan(info.getStartKey());
// Get a small sample of rows
s.setBatch(ROW_COUNT_ESTIMATE_BATCH_SIZE);
// Try and get every version so the row's size can be used to estimate.
s.setMaxVersions(Short.MAX_VALUE);
// Don't cache the blocks as we don't think these are
// necessarily important blocks.
s.setCacheBlocks(false);
// Try and get deletes too so their size can be counted.
s.setRaw(false);
org.apache.hadoop.hbase.client.Table table = getHBaseTable();
ResultScanner rs = table.getScanner(s);
long currentRowSize = 0;
long currentRowCount = 0;
try {
      // Get the ROW_COUNT_ESTIMATE_BATCH_SIZE fetched rows
// for a representative sample
for (int i = 0; i < ROW_COUNT_ESTIMATE_BATCH_SIZE; ++i) {
Result r = rs.next();
if (r == null)
break;
// Check for empty rows, see IMPALA-1451
if (r.isEmpty())
continue;
++currentRowCount;
// To estimate the number of rows we simply use the amount of bytes
// returned from the underlying buffer. Since HBase internally works
// with these structures as well this gives us ok estimates.
Cell[] cells = r.rawCells();
for (Cell c : cells) {
if (c instanceof KeyValue) {
currentRowSize += KeyValue.getKeyValueDataStructureSize(c.getRowLength(),
c.getFamilyLength(), c.getQualifierLength(), c.getValueLength(),
c.getTagsLength());
} else {
throw new IllegalStateException("Celltype " + c.getClass().getName() +
" not supported.");
}
}
}
} finally {
rs.close();
closeHBaseTable(table);
}
// If there are no rows then no need to estimate.
if (currentRowCount == 0) return new Pair<Long, Long>(0L, 0L);
// Get the size.
long currentSize = getRegionSize(location, clusterStatus);
// estimate the number of rows.
double bytesPerRow = currentRowSize / (double) currentRowCount;
if (currentSize == 0) {
return new Pair<Long, Long>(currentRowCount, (long) bytesPerRow);
}
// Compression factor two is only a best effort guess
long estimatedRowCount =
(long) ((isCompressed ? 2 : 1) * (currentSize / bytesPerRow));
return new Pair<Long, Long>(estimatedRowCount, (long) bytesPerRow);
}
/**
* Get an estimate of the number of rows and bytes per row in regions between
* startRowKey and endRowKey.
*
* This number is calculated by incrementally checking as many region servers as
* necessary until we observe a relatively constant row size per region on average.
* Depending on the skew of data in the regions this can either mean that we need
* to check only a minimal number of regions or that we will scan all regions.
*
* The HBase region servers periodically update the master with their metrics,
* including storefile size. We get the size of the storefiles for all regions in
* the cluster with a single call to getClusterStatus from the master.
*
* The accuracy of this number is determined by the number of rows that are written
* and kept in the memstore and have not been flushed until now. A large number
* of key-value pairs in the memstore will lead to bad estimates as this number
* is not reflected in the storefile size that is used to estimate this number.
*
* Currently, the algorithm does not consider the case that the key range used as a
* parameter might be generally of different size than the rest of the region.
*
* The values computed here should be cached so that in high qps workloads
* the nn is not overwhelmed. Could be done in load(); Synchronized to make
* sure that only one thread at a time is using the htable.
*
* @param startRowKey
* First row key in the range
* @param endRowKey
* Last row key in the range
* @return The estimated number of rows in the regions between the row keys (first) and
* the estimated row size in bytes (second).
*/
public synchronized Pair<Long, Long> getEstimatedRowStats(byte[] startRowKey,
byte[] endRowKey) {
Preconditions.checkNotNull(startRowKey);
Preconditions.checkNotNull(endRowKey);
boolean isCompressed = false;
long rowCount = 0;
long rowSize = 0;
org.apache.hadoop.hbase.client.Table table = null;
try {
table = getHBaseTable();
ClusterStatus clusterStatus = getClusterStatus();
// Check to see if things are compressed.
// If they are we'll estimate a compression factor.
if (columnFamilies_ == null) {
columnFamilies_ = table.getTableDescriptor().getColumnFamilies();
}
Preconditions.checkNotNull(columnFamilies_);
for (HColumnDescriptor desc : columnFamilies_) {
isCompressed |= desc.getCompression() != Compression.Algorithm.NONE;
}
// Fetch all regions for the key range
List<HRegionLocation> locations = getRegionsInRange(table, startRowKey, endRowKey);
Collections.shuffle(locations);
// The following variables track the number and size of 'rows' in
// HBase and allow incremental calculation of the average and standard
// deviation.
StatsHelper<Long> statsSize = new StatsHelper<Long>();
long totalEstimatedRows = 0;
// Collects stats samples from at least MIN_NUM_REGIONS_TO_CHECK
// and at most all regions until the delta is small enough.
while ((statsSize.count() < MIN_NUM_REGIONS_TO_CHECK ||
statsSize.stddev() > statsSize.mean() * DELTA_FROM_AVERAGE) &&
statsSize.count() < locations.size()) {
HRegionLocation currentLocation = locations.get((int) statsSize.count());
Pair<Long, Long> tmp = getEstimatedRowStatsForRegion(currentLocation,
isCompressed, clusterStatus);
totalEstimatedRows += tmp.first;
statsSize.addSample(tmp.second);
}
// Sum up the total size for all regions in range.
long totalSize = 0;
for (final HRegionLocation location : locations) {
totalSize += getRegionSize(location, clusterStatus);
}
if (totalSize == 0) {
rowCount = totalEstimatedRows;
} else {
rowCount = (long) (totalSize / statsSize.mean());
}
rowSize = (long) statsSize.mean();
} catch (IOException ioe) {
// Print the stack trace, but we'll ignore it
// as this is just an estimate.
// TODO: Put this into the per query log.
LOG.error("Error computing HBase row count estimate", ioe);
return new Pair<Long, Long>(-1l, -1l);
} finally {
if (table != null) closeHBaseTable(table);
}
return new Pair<Long, Long>(rowCount, rowSize);
}
/**
* Returns the size of the given region in bytes. Simply returns the storefile size
* for this region from the ClusterStatus. Returns 0 in case of an error.
*/
public long getRegionSize(HRegionLocation location, ClusterStatus clusterStatus) {
HRegionInfo info = location.getRegionInfo();
ServerLoad serverLoad = clusterStatus.getLoad(location.getServerName());
// If the serverLoad is null, the master doesn't have information for this region's
// server. This shouldn't normally happen.
if (serverLoad == null) {
LOG.error("Unable to find server load for server: " + location.getServerName() +
" for location " + info.getRegionNameAsString());
return 0;
}
RegionLoad regionLoad = serverLoad.getRegionsLoad().get(info.getRegionName());
if (regionLoad == null) {
LOG.error("Unable to find regions load for server: " + location.getServerName() +
" for location " + info.getRegionNameAsString());
return 0;
}
final long megaByte = 1024L * 1024L;
return regionLoad.getStorefileSizeMB() * megaByte;
}
/**
* Hive returns the columns in order of their declaration for HBase tables.
*/
@Override
public ArrayList<Column> getColumnsInHiveOrder() {
return getColumns();
}
@Override
public TTableDescriptor toThriftDescriptor(int tableId, Set<Long> referencedPartitions) {
TTableDescriptor tableDescriptor =
new TTableDescriptor(tableId, TTableType.HBASE_TABLE,
getTColumnDescriptors(), numClusteringCols_, hbaseTableName_, db_.getName());
tableDescriptor.setHbaseTable(getTHBaseTable());
return tableDescriptor;
}
public String getHBaseTableName() {
return hbaseTableName_;
}
@Override
public TCatalogObjectType getCatalogObjectType() {
return TCatalogObjectType.TABLE;
}
@Override
public TTable toThrift() {
TTable table = super.toThrift();
table.setTable_type(TTableType.HBASE_TABLE);
table.setHbase_table(getTHBaseTable());
return table;
}
private THBaseTable getTHBaseTable() {
THBaseTable tHbaseTable = new THBaseTable();
tHbaseTable.setTableName(hbaseTableName_);
for (Column c : getColumns()) {
HBaseColumn hbaseCol = (HBaseColumn) c;
tHbaseTable.addToFamilies(hbaseCol.getColumnFamily());
if (hbaseCol.getColumnQualifier() != null) {
tHbaseTable.addToQualifiers(hbaseCol.getColumnQualifier());
} else {
tHbaseTable.addToQualifiers("");
}
tHbaseTable.addToBinary_encoded(hbaseCol.isBinaryEncoded());
}
return tHbaseTable;
}
/**
* Get the corresponding regions for an arbitrary range of keys.
* This is copied from org.apache.hadoop.hbase.client.HTable in HBase 0.95. The
* differences are:
* 1. It does not use cache when calling getRegionLocation.
* 2. It is synchronized on hbaseTbl.
*
* @param startKey
* Starting key in range, inclusive
* @param endKey
* Ending key in range, exclusive
* @return A list of HRegionLocations corresponding to the regions that
* contain the specified range
* @throws IOException
* if a remote or network exception occurs
*/
public static List<HRegionLocation> getRegionsInRange(
org.apache.hadoop.hbase.client.Table hbaseTbl,
final byte[] startKey, final byte[] endKey) throws IOException {
final boolean endKeyIsEndOfTable = Bytes.equals(endKey, HConstants.EMPTY_END_ROW);
if ((Bytes.compareTo(startKey, endKey) > 0) && !endKeyIsEndOfTable) {
throw new IllegalArgumentException("Invalid range: " +
Bytes.toStringBinary(startKey) + " > " + Bytes.toStringBinary(endKey));
}
final List<HRegionLocation> regionList = new ArrayList<HRegionLocation>();
byte[] currentKey = startKey;
Connection connection = ConnectionHolder.getConnection(hbaseConf_);
// Make sure only one thread is accessing the hbaseTbl.
synchronized (hbaseTbl) {
RegionLocator locator = connection.getRegionLocator(hbaseTbl.getName());
do {
// always reload region location info.
HRegionLocation regionLocation = locator.getRegionLocation(currentKey, true);
regionList.add(regionLocation);
currentKey = regionLocation.getRegionInfo().getEndKey();
} while (!Bytes.equals(currentKey, HConstants.EMPTY_END_ROW) &&
(endKeyIsEndOfTable || Bytes.compareTo(currentKey, endKey) < 0));
}
return regionList;
}
/**
* Returns the storage handler class for HBase tables read by Hive.
*/
@Override
public String getStorageHandlerClassName() {
return HBASE_STORAGE_HANDLER;
}
/**
* Returns statistics on this table as a tabular result set. Used for the
* SHOW TABLE STATS statement. The schema of the returned TResultSet is set
* inside this method.
*/
public TResultSet getTableStats() {
TResultSet result = new TResultSet();
TResultSetMetadata resultSchema = new TResultSetMetadata();
result.setSchema(resultSchema);
resultSchema.addToColumns(
new TColumn("Region Location", Type.STRING.toThrift()));
resultSchema.addToColumns(new TColumn("Start RowKey",
Type.STRING.toThrift()));
resultSchema.addToColumns(new TColumn("Est. #Rows", Type.BIGINT.toThrift()));
resultSchema.addToColumns(new TColumn("Size", Type.STRING.toThrift()));
org.apache.hadoop.hbase.client.Table table;
try {
table = getHBaseTable();
} catch (IOException e) {
LOG.error("Error getting HBase table " + hbaseTableName_, e);
throw new RuntimeException(e);
}
// TODO: Consider fancier stats maintenance techniques for speeding up this process.
// Currently, we list all regions and perform a mini-scan of each of them to
// estimate the number of rows, the data size, etc., which is rather expensive.
try {
ClusterStatus clusterStatus = getClusterStatus();
long totalNumRows = 0;
long totalSize = 0;
List<HRegionLocation> regions = HBaseTable.getRegionsInRange(table,
HConstants.EMPTY_END_ROW, HConstants.EMPTY_START_ROW);
for (HRegionLocation region : regions) {
TResultRowBuilder rowBuilder = new TResultRowBuilder();
HRegionInfo regionInfo = region.getRegionInfo();
Pair<Long, Long> estRowStats =
getEstimatedRowStatsForRegion(region, false, clusterStatus);
long numRows = estRowStats.first.longValue();
long regionSize = getRegionSize(region, clusterStatus);
totalNumRows += numRows;
totalSize += regionSize;
// Add the region location, start rowkey, number of rows and raw size.
rowBuilder.add(String.valueOf(region.getHostname()))
.add(Bytes.toString(regionInfo.getStartKey())).add(numRows)
.addBytes(regionSize);
result.addToRows(rowBuilder.get());
}
// Total num rows and raw region size.
if (regions.size() > 1) {
TResultRowBuilder rowBuilder = new TResultRowBuilder();
rowBuilder.add("Total").add("").add(totalNumRows).addBytes(totalSize);
result.addToRows(rowBuilder.get());
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
closeHBaseTable(table);
}
return result;
}
/**
* Returns true if the given Metastore Table represents an HBase table.
* Versions of Hive/HBase are inconsistent which HBase related fields are set
* (e.g., HIVE-6548 changed the input format to null).
* For maximum compatibility consider all known fields that indicate an HBase table.
*/
public static boolean isHBaseTable(
org.apache.hadoop.hive.metastore.api.Table msTbl) {
if (msTbl.getParameters() != null &&
msTbl.getParameters().containsKey(HBASE_STORAGE_HANDLER)) {
return true;
}
StorageDescriptor sd = msTbl.getSd();
if (sd == null) return false;
if (sd.getInputFormat() != null && sd.getInputFormat().equals(HBASE_INPUT_FORMAT)) {
return true;
} else if (sd.getSerdeInfo() != null &&
sd.getSerdeInfo().getSerializationLib() != null &&
sd.getSerdeInfo().getSerializationLib().equals(HBASE_SERIALIZATION_LIB)) {
return true;
}
return false;
}
}
| fe/src/main/java/org/apache/impala/catalog/HBaseTable.java | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.catalog;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.RegionLoad;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hive.hbase.HBaseSerDe;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.log4j.Logger;
import org.apache.impala.common.Pair;
import org.apache.impala.thrift.TCatalogObjectType;
import org.apache.impala.thrift.TColumn;
import org.apache.impala.thrift.THBaseTable;
import org.apache.impala.thrift.TResultSet;
import org.apache.impala.thrift.TResultSetMetadata;
import org.apache.impala.thrift.TTable;
import org.apache.impala.thrift.TTableDescriptor;
import org.apache.impala.thrift.TTableType;
import org.apache.impala.util.StatsHelper;
import org.apache.impala.util.TResultRowBuilder;
import com.codahale.metrics.Timer;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
* Impala representation of HBase table metadata,
* as loaded from Hive's metastore.
* This implies that we inherit the metastore's limitations related to HBase,
* for example the lack of support for composite HBase row keys.
* We sort the HBase columns (cols) by family/qualifier
* to simplify the retrieval logic in the backend, since
* HBase returns data ordered by family/qualifier.
* This implies that a "select *"-query on an HBase table
* will not have the columns ordered as they were declared in the DDL.
* They will be ordered by family/qualifier.
*
*/
public class HBaseTable extends Table {
// Maximum deviation from the average to stop querying more regions
// to estimate the row count
private static final double DELTA_FROM_AVERAGE = 0.15;
private static final Logger LOG = Logger.getLogger(HBaseTable.class);
// Copied from Hive's HBaseStorageHandler.java.
public static final String DEFAULT_PREFIX = "default.";
// Number of rows fetched during the row count estimation per region
public static final int ROW_COUNT_ESTIMATE_BATCH_SIZE = 10;
// Minimum number of regions that are checked to estimate the row count
private static final int MIN_NUM_REGIONS_TO_CHECK = 5;
// Name of table in HBase.
// 'this.name' is the alias of the HBase table in Hive.
protected String hbaseTableName_;
// Input format class for HBase tables read by Hive.
private static final String HBASE_INPUT_FORMAT =
"org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat";
// Serialization class for HBase tables set in the corresponding Metastore table.
private static final String HBASE_SERIALIZATION_LIB =
"org.apache.hadoop.hive.hbase.HBaseSerDe";
// Storage handler class for HBase tables read by Hive.
private static final String HBASE_STORAGE_HANDLER =
"org.apache.hadoop.hive.hbase.HBaseStorageHandler";
// Column family of HBase row key
private static final String ROW_KEY_COLUMN_FAMILY = ":key";
// Keep the conf around
private final static Configuration hbaseConf_ = HBaseConfiguration.create();
// Cached column families. Used primarily for speeding up row stats estimation
// (see IMPALA-4211).
private HColumnDescriptor[] columnFamilies_ = null;
protected HBaseTable(org.apache.hadoop.hive.metastore.api.Table msTbl,
Db db, String name, String owner) {
super(msTbl, db, name, owner);
}
/**
* Connection instances are expensive to create. The HBase documentation recommends
* one and then sharing it among threads. All operations on a connection are
* thread-safe.
*/
private static class ConnectionHolder {
private static Connection connection_ = null;
public static synchronized Connection getConnection(Configuration conf)
throws IOException {
if (connection_ == null || connection_.isClosed()) {
connection_ = ConnectionFactory.createConnection(conf);
}
return connection_;
}
}
/**
* Table client objects are thread-unsafe and cheap to create. The HBase docs recommend
* creating a new one for each task and then closing when done.
*/
public org.apache.hadoop.hbase.client.Table getHBaseTable() throws IOException {
return ConnectionHolder.getConnection(hbaseConf_)
.getTable(TableName.valueOf(hbaseTableName_));
}
private void closeHBaseTable(org.apache.hadoop.hbase.client.Table table) {
try {
table.close();
} catch (IOException e) {
LOG.error("Error closing HBase table: " + hbaseTableName_, e);
}
}
/**
* Get the cluster status, making sure we close the admin client afterwards.
*/
public ClusterStatus getClusterStatus() throws IOException {
Admin admin = null;
ClusterStatus clusterStatus = null;
try {
Connection connection = ConnectionHolder.getConnection(hbaseConf_);
admin = connection.getAdmin();
clusterStatus = admin.getClusterStatus();
} finally {
if (admin != null) admin.close();
}
return clusterStatus;
}
/**
* Parse the column description string to the column families and column
   * qualifiers. This is a copy of HBaseSerDe.parseColumnMapping and
   * parseColumnStorageTypes with parts we don't use removed. The Hive functions
* are not public.
   * tableDefaultStorageIsBinary - true if the table defaults to binary encoding
* columnsMappingSpec - input string format describing the table
* fieldSchemas - input field schema from metastore table
* columnFamilies/columnQualifiers/columnBinaryEncodings - out parameters that will be
* filled with the column family, column qualifier and encoding for each column.
*/
private void parseColumnMapping(boolean tableDefaultStorageIsBinary,
String columnsMappingSpec, List<FieldSchema> fieldSchemas,
List<String> columnFamilies, List<String> columnQualifiers,
List<Boolean> colIsBinaryEncoded) throws SerDeException {
if (columnsMappingSpec == null) {
throw new SerDeException(
"Error: hbase.columns.mapping missing for this HBase table.");
}
if (columnsMappingSpec.equals("") ||
columnsMappingSpec.equals(HBaseSerDe.HBASE_KEY_COL)) {
throw new SerDeException("Error: hbase.columns.mapping specifies only "
+ "the HBase table row key. A valid Hive-HBase table must specify at "
+ "least one additional column.");
}
int rowKeyIndex = -1;
String[] columnSpecs = columnsMappingSpec.split(",");
// If there was an implicit key column mapping, the number of columns (fieldSchemas)
// will be one more than the number of column mapping specs.
int fsStartIdxOffset = fieldSchemas.size() - columnSpecs.length;
if (fsStartIdxOffset != 0 && fsStartIdxOffset != 1) {
// This should never happen - Hive blocks creating a mismatched table and both Hive
// and Impala currently block all column-level DDL on HBase tables.
throw new SerDeException(String.format("Number of entries in " +
"'hbase.columns.mapping' does not match the number of columns in the " +
"table: %d != %d (counting the key if implicit)",
columnSpecs.length, fieldSchemas.size()));
}
for (int i = 0; i < columnSpecs.length; ++i) {
String mappingSpec = columnSpecs[i];
String[] mapInfo = mappingSpec.split("#");
// Trim column info so that serdeproperties with new lines still parse correctly.
String colInfo = mapInfo[0].trim();
int idxFirst = colInfo.indexOf(":");
int idxLast = colInfo.lastIndexOf(":");
if (idxFirst < 0 || !(idxFirst == idxLast)) {
throw new SerDeException("Error: the HBase columns mapping contains a "
+ "badly formed column family, column qualifier specification.");
}
if (colInfo.equals(HBaseSerDe.HBASE_KEY_COL)) {
Preconditions.checkState(fsStartIdxOffset == 0);
rowKeyIndex = i;
columnFamilies.add(colInfo);
columnQualifiers.add(null);
} else {
String[] parts = colInfo.split(":");
Preconditions.checkState(parts.length > 0 && parts.length <= 2);
columnFamilies.add(parts[0]);
if (parts.length == 2) {
columnQualifiers.add(parts[1]);
} else {
columnQualifiers.add(null);
}
}
// Set column binary encoding
FieldSchema fieldSchema = fieldSchemas.get(i + fsStartIdxOffset);
boolean supportsBinaryEncoding = supportsBinaryEncoding(fieldSchema);
if (mapInfo.length == 1) {
// There is no column level storage specification. Use the table storage spec.
colIsBinaryEncoded.add(
new Boolean(tableDefaultStorageIsBinary && supportsBinaryEncoding));
} else if (mapInfo.length == 2) {
// There is a storage specification for the column
String storageOption = mapInfo[1];
if (!(storageOption.equals("-") || "string".startsWith(storageOption) || "binary"
.startsWith(storageOption))) {
throw new SerDeException("Error: A column storage specification is one of"
+ " the following: '-', a prefix of 'string', or a prefix of 'binary'. "
+ storageOption + " is not a valid storage option specification for "
+ fieldSchema.getName());
}
boolean isBinaryEncoded = false;
if ("-".equals(storageOption)) {
isBinaryEncoded = tableDefaultStorageIsBinary;
} else if ("binary".startsWith(storageOption)) {
isBinaryEncoded = true;
}
if (isBinaryEncoded && !supportsBinaryEncoding) {
// Use string encoding and log a warning if the column spec is binary but the
// column type does not support it.
// TODO: Hive/HBase does not raise an exception, but should we?
LOG.warn("Column storage specification for column " + fieldSchema.getName()
+ " is binary" + " but the column type " + fieldSchema.getType() +
" does not support binary encoding. Fallback to string format.");
isBinaryEncoded = false;
}
colIsBinaryEncoded.add(isBinaryEncoded);
} else {
// error in storage specification
throw new SerDeException("Error: " + HBaseSerDe.HBASE_COLUMNS_MAPPING
+ " storage specification " + mappingSpec + " is not valid for column: "
+ fieldSchema.getName());
}
}
if (rowKeyIndex == -1) {
columnFamilies.add(0, HBaseSerDe.HBASE_KEY_COL);
columnQualifiers.add(0, null);
colIsBinaryEncoded.add(0,
supportsBinaryEncoding(fieldSchemas.get(0)) && tableDefaultStorageIsBinary);
}
}
private boolean supportsBinaryEncoding(FieldSchema fs) {
try {
Type colType = parseColumnType(fs);
// Only boolean, integer and floating point types can use binary storage.
return colType.isBoolean() || colType.isIntegerType()
|| colType.isFloatingPointType();
} catch (TableLoadingException e) {
return false;
}
}
@Override
/**
* For hbase tables, we can support tables with columns we don't understand at
* all (e.g. map) as long as the user does not select those. This is in contrast
* to hdfs tables since we typically need to understand all columns to make sense
* of the file at all.
*/
public void load(boolean reuseMetadata, IMetaStoreClient client,
org.apache.hadoop.hive.metastore.api.Table msTbl) throws TableLoadingException {
Preconditions.checkNotNull(getMetaStoreTable());
final Timer.Context context =
getMetrics().getTimer(Table.LOAD_DURATION_METRIC).time();
try {
msTable_ = msTbl;
hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
// Warm up the connection and verify the table exists.
getHBaseTable().close();
columnFamilies_ = null;
Map<String, String> serdeParams =
getMetaStoreTable().getSd().getSerdeInfo().getParameters();
String hbaseColumnsMapping = serdeParams.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
if (hbaseColumnsMapping == null) {
throw new MetaException("No hbase.columns.mapping defined in Serde.");
}
String hbaseTableDefaultStorageType = getMetaStoreTable().getParameters().get(
HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE);
boolean tableDefaultStorageIsBinary = false;
if (hbaseTableDefaultStorageType != null &&
!hbaseTableDefaultStorageType.isEmpty()) {
if (hbaseTableDefaultStorageType.equalsIgnoreCase("binary")) {
tableDefaultStorageIsBinary = true;
} else if (!hbaseTableDefaultStorageType.equalsIgnoreCase("string")) {
throw new SerDeException("Error: " +
HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE +
" parameter must be specified as" +
" 'string' or 'binary'; '" + hbaseTableDefaultStorageType +
"' is not a valid specification for this table/serde property.");
}
}
// Parse HBase column-mapping string.
List<FieldSchema> fieldSchemas = getMetaStoreTable().getSd().getCols();
List<String> hbaseColumnFamilies = new ArrayList<String>();
List<String> hbaseColumnQualifiers = new ArrayList<String>();
List<Boolean> hbaseColumnBinaryEncodings = new ArrayList<Boolean>();
parseColumnMapping(tableDefaultStorageIsBinary, hbaseColumnsMapping, fieldSchemas,
hbaseColumnFamilies, hbaseColumnQualifiers, hbaseColumnBinaryEncodings);
Preconditions.checkState(
hbaseColumnFamilies.size() == hbaseColumnQualifiers.size());
Preconditions.checkState(fieldSchemas.size() == hbaseColumnFamilies.size());
// Populate tmp cols in the order they appear in the Hive metastore.
// We will reorder the cols below.
List<HBaseColumn> tmpCols = Lists.newArrayList();
// Store the key column separately.
// TODO: Change this to an ArrayList once we support composite row keys.
HBaseColumn keyCol = null;
for (int i = 0; i < fieldSchemas.size(); ++i) {
FieldSchema s = fieldSchemas.get(i);
Type t = Type.INVALID;
try {
t = parseColumnType(s);
} catch (TableLoadingException e) {
// Ignore hbase types we don't support yet. We can load the metadata
// but won't be able to select from it.
}
HBaseColumn col = new HBaseColumn(s.getName(), hbaseColumnFamilies.get(i),
hbaseColumnQualifiers.get(i), hbaseColumnBinaryEncodings.get(i),
t, s.getComment(), -1);
if (col.getColumnFamily().equals(ROW_KEY_COLUMN_FAMILY)) {
// Store the row key column separately from the rest
keyCol = col;
} else {
tmpCols.add(col);
}
}
Preconditions.checkState(keyCol != null);
// The backend assumes that the row key column is always first and
// that the remaining HBase columns are ordered by columnFamily,columnQualifier,
// so the final position depends on the other mapped HBase columns.
// Sort columns and update positions.
Collections.sort(tmpCols);
clearColumns();
keyCol.setPosition(0);
addColumn(keyCol);
// Update the positions of the remaining columns
for (int i = 0; i < tmpCols.size(); ++i) {
HBaseColumn col = tmpCols.get(i);
col.setPosition(i + 1);
addColumn(col);
}
// Set table stats.
setTableStats(msTable_);
// since we don't support composite hbase rowkeys yet, all hbase tables have a
// single clustering col
numClusteringCols_ = 1;
loadAllColumnStats(client);
} catch (Exception e) {
throw new TableLoadingException("Failed to load metadata for HBase table: " +
name_, e);
} finally {
context.stop();
}
}
@Override
protected void loadFromThrift(TTable table) throws TableLoadingException {
super.loadFromThrift(table);
try {
hbaseTableName_ = getHBaseTableName(getMetaStoreTable());
// Warm up the connection and verify the table exists.
getHBaseTable().close();
columnFamilies_ = null;
} catch (Exception e) {
throw new TableLoadingException("Failed to load metadata for HBase table from " +
"thrift table: " + name_, e);
}
}
/**
* This method is completely copied from Hive's HBaseStorageHandler.java.
*/
private String getHBaseTableName(org.apache.hadoop.hive.metastore.api.Table tbl) {
// Give preference to TBLPROPERTIES over SERDEPROPERTIES
// (really we should only use TBLPROPERTIES, so this is just
// for backwards compatibility with the original specs).
String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME);
if (tableName == null) {
tableName = tbl.getSd().getSerdeInfo().getParameters().get(
HBaseSerDe.HBASE_TABLE_NAME);
}
if (tableName == null) {
tableName = tbl.getDbName() + "." + tbl.getTableName();
if (tableName.startsWith(DEFAULT_PREFIX)) {
tableName = tableName.substring(DEFAULT_PREFIX.length());
}
}
return tableName;
}
/**
* Estimates the number of rows for a single region and returns a pair with
* the estimated row count and the estimated size in bytes per row.
*/
private Pair<Long, Long> getEstimatedRowStatsForRegion(HRegionLocation location,
boolean isCompressed, ClusterStatus clusterStatus) throws IOException {
HRegionInfo info = location.getRegionInfo();
Scan s = new Scan(info.getStartKey());
// Get a small sample of rows
s.setBatch(ROW_COUNT_ESTIMATE_BATCH_SIZE);
// Try and get every version so the row's size can be used to estimate.
s.setMaxVersions(Short.MAX_VALUE);
// Don't cache the blocks as we don't think these are
// necessarily important blocks.
s.setCacheBlocks(false);
// Try and get deletes too so their size can be counted.
s.setRaw(false);
org.apache.hadoop.hbase.client.Table table = getHBaseTable();
ResultScanner rs = table.getScanner(s);
long currentRowSize = 0;
long currentRowCount = 0;
try {
      // Get the ROW_COUNT_ESTIMATE_BATCH_SIZE fetched rows
// for a representative sample
for (int i = 0; i < ROW_COUNT_ESTIMATE_BATCH_SIZE; ++i) {
Result r = rs.next();
if (r == null)
break;
// Check for empty rows, see IMPALA-1451
if (r.isEmpty())
continue;
++currentRowCount;
// To estimate the number of rows we simply use the amount of bytes
// returned from the underlying buffer. Since HBase internally works
// with these structures as well this gives us ok estimates.
Cell[] cells = r.rawCells();
for (Cell c : cells) {
if (c instanceof KeyValue) {
currentRowSize += KeyValue.getKeyValueDataStructureSize(c.getRowLength(),
c.getFamilyLength(), c.getQualifierLength(), c.getValueLength(),
c.getTagsLength());
} else {
throw new IllegalStateException("Celltype " + c.getClass().getName() +
" not supported.");
}
}
}
} finally {
rs.close();
closeHBaseTable(table);
}
// If there are no rows then no need to estimate.
if (currentRowCount == 0) return new Pair<Long, Long>(0L, 0L);
// Get the size.
long currentSize = getRegionSize(location, clusterStatus);
// estimate the number of rows.
double bytesPerRow = currentRowSize / (double) currentRowCount;
if (currentSize == 0) {
return new Pair<Long, Long>(currentRowCount, (long) bytesPerRow);
}
// Compression factor two is only a best effort guess
long estimatedRowCount =
(long) ((isCompressed ? 2 : 1) * (currentSize / bytesPerRow));
return new Pair<Long, Long>(estimatedRowCount, (long) bytesPerRow);
}
/**
* Get an estimate of the number of rows and bytes per row in regions between
* startRowKey and endRowKey.
*
* This number is calculated by incrementally checking as many region servers as
* necessary until we observe a relatively constant row size per region on average.
* Depending on the skew of data in the regions this can either mean that we need
* to check only a minimal number of regions or that we will scan all regions.
*
* The HBase region servers periodically update the master with their metrics,
* including storefile size. We get the size of the storefiles for all regions in
* the cluster with a single call to getClusterStatus from the master.
*
* The accuracy of this number is determined by the number of rows that are written
* and kept in the memstore and have not been flushed until now. A large number
* of key-value pairs in the memstore will lead to bad estimates as this number
* is not reflected in the storefile size that is used to estimate this number.
*
* Currently, the algorithm does not consider the case that the key range used as a
* parameter might be generally of different size than the rest of the region.
*
* The values computed here should be cached so that in high qps workloads
* the nn is not overwhelmed. Could be done in load(); Synchronized to make
* sure that only one thread at a time is using the htable.
*
* @param startRowKey
* First row key in the range
* @param endRowKey
* Last row key in the range
* @return The estimated number of rows in the regions between the row keys (first) and
* the estimated row size in bytes (second).
*/
public synchronized Pair<Long, Long> getEstimatedRowStats(byte[] startRowKey,
byte[] endRowKey) {
Preconditions.checkNotNull(startRowKey);
Preconditions.checkNotNull(endRowKey);
boolean isCompressed = false;
long rowCount = 0;
long rowSize = 0;
org.apache.hadoop.hbase.client.Table table = null;
try {
table = getHBaseTable();
ClusterStatus clusterStatus = getClusterStatus();
// Check to see if things are compressed.
// If they are we'll estimate a compression factor.
if (columnFamilies_ == null) {
columnFamilies_ = table.getTableDescriptor().getColumnFamilies();
}
Preconditions.checkNotNull(columnFamilies_);
for (HColumnDescriptor desc : columnFamilies_) {
isCompressed |= desc.getCompression() != Compression.Algorithm.NONE;
}
// Fetch all regions for the key range
List<HRegionLocation> locations = getRegionsInRange(table, startRowKey, endRowKey);
Collections.shuffle(locations);
// The following variables track the number and size of 'rows' in
// HBase and allow incremental calculation of the average and standard
// deviation.
StatsHelper<Long> statsSize = new StatsHelper<Long>();
long totalEstimatedRows = 0;
// Collects stats samples from at least MIN_NUM_REGIONS_TO_CHECK
// and at most all regions until the delta is small enough.
while ((statsSize.count() < MIN_NUM_REGIONS_TO_CHECK ||
statsSize.stddev() > statsSize.mean() * DELTA_FROM_AVERAGE) &&
statsSize.count() < locations.size()) {
HRegionLocation currentLocation = locations.get((int) statsSize.count());
Pair<Long, Long> tmp = getEstimatedRowStatsForRegion(currentLocation,
isCompressed, clusterStatus);
totalEstimatedRows += tmp.first;
statsSize.addSample(tmp.second);
}
// Sum up the total size for all regions in range.
long totalSize = 0;
for (final HRegionLocation location : locations) {
totalSize += getRegionSize(location, clusterStatus);
}
if (totalSize == 0) {
rowCount = totalEstimatedRows;
} else {
rowCount = (long) (totalSize / statsSize.mean());
}
rowSize = (long) statsSize.mean();
} catch (IOException ioe) {
// Print the stack trace, but we'll ignore it
// as this is just an estimate.
// TODO: Put this into the per query log.
LOG.error("Error computing HBase row count estimate", ioe);
return new Pair<Long, Long>(-1l, -1l);
} finally {
if (table != null) closeHBaseTable(table);
}
return new Pair<Long, Long>(rowCount, rowSize);
}
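  // Illustrative usage sketch (not part of the original source; startRowKey/endRowKey are
  // whatever byte[] scan bounds the caller already has). Note that the IOException path
  // above returns (-1, -1), which callers should treat as "estimate unavailable":
  //   Pair<Long, Long> stats = getEstimatedRowStats(startRowKey, endRowKey);
  //   long estimatedRows = stats.first;   // -1 if the estimate failed
  //   long bytesPerRow = stats.second;    // -1 if the estimate failed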
/**
* Returns the size of the given region in bytes. Simply returns the storefile size
* for this region from the ClusterStatus. Returns 0 in case of an error.
*/
public long getRegionSize(HRegionLocation location, ClusterStatus clusterStatus) {
HRegionInfo info = location.getRegionInfo();
ServerLoad serverLoad = clusterStatus.getLoad(location.getServerName());
// If the serverLoad is null, the master doesn't have information for this region's
// server. This shouldn't normally happen.
if (serverLoad == null) {
LOG.error("Unable to find load for server: " + location.getServerName() +
" for location " + info.getRegionNameAsString());
return 0;
}
RegionLoad regionLoad = serverLoad.getRegionsLoad().get(info.getRegionName());
final long megaByte = 1024L * 1024L;
return regionLoad.getStorefileSizeMB() * megaByte;
}
/**
* Hive returns the columns in order of their declaration for HBase tables.
*/
@Override
public ArrayList<Column> getColumnsInHiveOrder() {
return getColumns();
}
@Override
public TTableDescriptor toThriftDescriptor(int tableId, Set<Long> referencedPartitions) {
TTableDescriptor tableDescriptor =
new TTableDescriptor(tableId, TTableType.HBASE_TABLE,
getTColumnDescriptors(), numClusteringCols_, hbaseTableName_, db_.getName());
tableDescriptor.setHbaseTable(getTHBaseTable());
return tableDescriptor;
}
public String getHBaseTableName() {
return hbaseTableName_;
}
@Override
public TCatalogObjectType getCatalogObjectType() {
return TCatalogObjectType.TABLE;
}
@Override
public TTable toThrift() {
TTable table = super.toThrift();
table.setTable_type(TTableType.HBASE_TABLE);
table.setHbase_table(getTHBaseTable());
return table;
}
private THBaseTable getTHBaseTable() {
THBaseTable tHbaseTable = new THBaseTable();
tHbaseTable.setTableName(hbaseTableName_);
for (Column c : getColumns()) {
HBaseColumn hbaseCol = (HBaseColumn) c;
tHbaseTable.addToFamilies(hbaseCol.getColumnFamily());
if (hbaseCol.getColumnQualifier() != null) {
tHbaseTable.addToQualifiers(hbaseCol.getColumnQualifier());
} else {
tHbaseTable.addToQualifiers("");
}
tHbaseTable.addToBinary_encoded(hbaseCol.isBinaryEncoded());
}
return tHbaseTable;
}
/**
* Get the corresponding regions for an arbitrary range of keys.
* This is copied from org.apache.hadoop.hbase.client.HTable in HBase 0.95. The
* differences are:
* 1. It does not use cache when calling getRegionLocation.
* 2. It is synchronized on hbaseTbl.
*
* @param startKey
* Starting key in range, inclusive
* @param endKey
* Ending key in range, exclusive
* @return A list of HRegionLocations corresponding to the regions that
* contain the specified range
* @throws IOException
* if a remote or network exception occurs
*/
public static List<HRegionLocation> getRegionsInRange(
org.apache.hadoop.hbase.client.Table hbaseTbl,
final byte[] startKey, final byte[] endKey) throws IOException {
final boolean endKeyIsEndOfTable = Bytes.equals(endKey, HConstants.EMPTY_END_ROW);
if ((Bytes.compareTo(startKey, endKey) > 0) && !endKeyIsEndOfTable) {
throw new IllegalArgumentException("Invalid range: " +
Bytes.toStringBinary(startKey) + " > " + Bytes.toStringBinary(endKey));
}
final List<HRegionLocation> regionList = new ArrayList<HRegionLocation>();
byte[] currentKey = startKey;
Connection connection = ConnectionHolder.getConnection(hbaseConf_);
// Make sure only one thread is accessing the hbaseTbl.
synchronized (hbaseTbl) {
RegionLocator locator = connection.getRegionLocator(hbaseTbl.getName());
do {
// always reload region location info.
HRegionLocation regionLocation = locator.getRegionLocation(currentKey, true);
regionList.add(regionLocation);
currentKey = regionLocation.getRegionInfo().getEndKey();
} while (!Bytes.equals(currentKey, HConstants.EMPTY_END_ROW) &&
(endKeyIsEndOfTable || Bytes.compareTo(currentKey, endKey) < 0));
}
return regionList;
}
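  // Illustrative usage (not in the original source): to cover every region of the table,
  // as getTableStats() below does, pass the empty start/end row constants:
  //   List<HRegionLocation> regions =
  //       getRegionsInRange(hbaseTbl, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);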
/**
* Returns the storage handler class for HBase tables read by Hive.
*/
@Override
public String getStorageHandlerClassName() {
return HBASE_STORAGE_HANDLER;
}
/**
* Returns statistics on this table as a tabular result set. Used for the
* SHOW TABLE STATS statement. The schema of the returned TResultSet is set
* inside this method.
*/
public TResultSet getTableStats() {
TResultSet result = new TResultSet();
TResultSetMetadata resultSchema = new TResultSetMetadata();
result.setSchema(resultSchema);
resultSchema.addToColumns(
new TColumn("Region Location", Type.STRING.toThrift()));
resultSchema.addToColumns(new TColumn("Start RowKey",
Type.STRING.toThrift()));
resultSchema.addToColumns(new TColumn("Est. #Rows", Type.BIGINT.toThrift()));
resultSchema.addToColumns(new TColumn("Size", Type.STRING.toThrift()));
org.apache.hadoop.hbase.client.Table table;
try {
table = getHBaseTable();
} catch (IOException e) {
LOG.error("Error getting HBase table " + hbaseTableName_, e);
throw new RuntimeException(e);
}
// TODO: Consider fancier stats maintenance techniques for speeding up this process.
// Currently, we list all regions and perform a mini-scan of each of them to
// estimate the number of rows, the data size, etc., which is rather expensive.
try {
ClusterStatus clusterStatus = getClusterStatus();
long totalNumRows = 0;
long totalSize = 0;
List<HRegionLocation> regions = HBaseTable.getRegionsInRange(table,
HConstants.EMPTY_END_ROW, HConstants.EMPTY_START_ROW);
for (HRegionLocation region : regions) {
TResultRowBuilder rowBuilder = new TResultRowBuilder();
HRegionInfo regionInfo = region.getRegionInfo();
Pair<Long, Long> estRowStats =
getEstimatedRowStatsForRegion(region, false, clusterStatus);
long numRows = estRowStats.first.longValue();
long regionSize = getRegionSize(region, clusterStatus);
totalNumRows += numRows;
totalSize += regionSize;
// Add the region location, start rowkey, number of rows and raw size.
rowBuilder.add(String.valueOf(region.getHostname()))
.add(Bytes.toString(regionInfo.getStartKey())).add(numRows)
.addBytes(regionSize);
result.addToRows(rowBuilder.get());
}
// Total num rows and raw region size.
if (regions.size() > 1) {
TResultRowBuilder rowBuilder = new TResultRowBuilder();
rowBuilder.add("Total").add("").add(totalNumRows).addBytes(totalSize);
result.addToRows(rowBuilder.get());
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
closeHBaseTable(table);
}
return result;
}
/**
* Returns true if the given Metastore Table represents an HBase table.
   * Versions of Hive/HBase are inconsistent about which HBase-related fields are set
   * (e.g., HIVE-6548 changed the input format to null).
   * For maximum compatibility, consider all known fields that indicate an HBase table.
*/
public static boolean isHBaseTable(
org.apache.hadoop.hive.metastore.api.Table msTbl) {
if (msTbl.getParameters() != null &&
msTbl.getParameters().containsKey(HBASE_STORAGE_HANDLER)) {
return true;
}
StorageDescriptor sd = msTbl.getSd();
if (sd == null) return false;
if (sd.getInputFormat() != null && sd.getInputFormat().equals(HBASE_INPUT_FORMAT)) {
return true;
} else if (sd.getSerdeInfo() != null &&
sd.getSerdeInfo().getSerializationLib() != null &&
sd.getSerdeInfo().getSerializationLib().equals(HBASE_SERIALIZATION_LIB)) {
return true;
}
return false;
}
}
| IMPALA-7091: Address NullPointerException in HBaseTable.getRegionSize().
It's possible for "serverLoad.getRegionsLoad().get(info.getRegionName())"
to return null, which causes a NullPointerException in the planner that is
visible to the user. The code around it already documents that errors are
handled by returning 0 for the size, and I've extended that to one more
case.
In practice, I have seen this come up in failures of the following test:
failure.test_failpoints.TestFailpoints.test_failpoints[table_format: hbase/none | exec_option: {'batch_size': 0, 'num_nodes': 0, 'disable_codegen_rows_threshold': 0, 'disable_codegen': False, 'abort_on_error': 1, 'debug_action': None, 'exec_single_node_rows_threshold': 0} | mt_dop: 4 | location: OPEN | action: MEM_LIMIT_EXCEEDED | query: select * from alltypessmall union all select * from alltypessmall]
I have only seen this failure in some test-with-docker runs, and only
inconsistently. The error is a little hard to spot, but by correlating the
timestamp of the failing test (which just complains about a
NullPointerException), you can find a Java stack trace reporting an NPE at
"regionLoad.getStorefileSizeMB()". I think the likely cause is regionLoad
being null.
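As a minimal sketch (using the surrounding variable names from getRegionSize(); this
mirrors the fix in the diff below rather than adding anything new to it), the guard
looks like:

  RegionLoad regionLoad = serverLoad.getRegionsLoad().get(info.getRegionName());
  if (regionLoad == null) {
    // The master has no load entry for this region; fall back to a size of 0,
    // matching the existing serverLoad == null error path.
    LOG.error("Unable to find regions load for server: " + location.getServerName() +
        " for location " + info.getRegionNameAsString());
    return 0;
  }
  final long megaByte = 1024L * 1024L;
  return regionLoad.getStorefileSizeMB() * megaByte;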
Change-Id: I02f06daf69e7f7e97c9ecc13997147530c2f9d3f
Reviewed-on: http://gerrit.cloudera.org:8080/10531
Reviewed-by: Joe McDonnell <[email protected]>
Tested-by: Impala Public Jenkins <[email protected]>
| fe/src/main/java/org/apache/impala/catalog/HBaseTable.java | IMPALA-7091: Address NullPointerException in HBaseTable.getRegionSize(). | <ide><path>e/src/main/java/org/apache/impala/catalog/HBaseTable.java
<ide> // If the serverLoad is null, the master doesn't have information for this region's
<ide> // server. This shouldn't normally happen.
<ide> if (serverLoad == null) {
<del> LOG.error("Unable to find load for server: " + location.getServerName() +
<add> LOG.error("Unable to find server load for server: " + location.getServerName() +
<ide> " for location " + info.getRegionNameAsString());
<ide> return 0;
<ide> }
<ide> RegionLoad regionLoad = serverLoad.getRegionsLoad().get(info.getRegionName());
<del>
<add> if (regionLoad == null) {
<add> LOG.error("Unable to find regions load for server: " + location.getServerName() +
<add> " for location " + info.getRegionNameAsString());
<add> return 0;
<add> }
<ide> final long megaByte = 1024L * 1024L;
<ide> return regionLoad.getStorefileSizeMB() * megaByte;
<ide> } |
|
Java | agpl-3.0 | 079edac32fa2a08fcf890a413cb3b5cd08fb4cf4 | 0 | EhsanTang/ApiManager,EhsanTang/ApiManager,EhsanTang/ApiManager | package cn.crap.controller.user;
import cn.crap.adapter.ErrorAdapter;
import cn.crap.adapter.InterfaceAdapter;
import cn.crap.dto.CategoryDto;
import cn.crap.dto.DictionaryDto;
import cn.crap.dto.InterfacePDFDto;
import cn.crap.dto.SettingDto;
import cn.crap.enumer.ArticleType;
import cn.crap.enumer.MyError;
import cn.crap.enumer.ProjectType;
import cn.crap.framework.JsonResult;
import cn.crap.framework.MyException;
import cn.crap.framework.base.BaseController;
import cn.crap.model.mybatis.*;
import cn.crap.model.mybatis.Error;
import cn.crap.service.custom.CustomArticleService;
import cn.crap.service.custom.CustomErrorService;
import cn.crap.service.custom.CustomInterfaceService;
import cn.crap.service.custom.CustomModuleService;
import cn.crap.service.mybatis.ArticleService;
import cn.crap.service.mybatis.InterfaceService;
import cn.crap.service.mybatis.ModuleService;
import cn.crap.beans.Config;
import cn.crap.utils.*;
import net.sf.json.JSONArray;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Controller
@RequestMapping("/user/staticize")
public class StaticizeController extends BaseController{
@Autowired
private CustomInterfaceService customInterfaceService;
@Autowired
private ModuleService moduleService;
@Autowired
private Config config;
@Autowired
private InterfaceService interfaceService;
@Autowired
private CustomErrorService customErrorService;
@Autowired
private ArticleService articleService;
@Autowired
private CustomArticleService customArticleService;
@Autowired
private CustomModuleService customModuleService;
/**
* 静态化错误码列表
*/
@RequestMapping("/errorList.do")
public ModelAndView staticizeError(HttpServletRequest req, @RequestParam String projectId,@RequestParam int currentPage,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Project project = projectCache.get(projectId);
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(null, project, "-错误码");
Page page = new Page(15, currentPage);
List<Error> errorModels = customErrorService.queryByProjectId(projectId, null, null, page);
returnMap.put("page", page);
returnMap.put("errorList", ErrorAdapter.getDto(errorModels));
returnMap.put("activePage","errorList");
returnMap.put("url", "errorList");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/errorList.jsp",returnMap);
}
private String getStaticPath(Project project) {
return Tools.getServicePath() + "static/"+project.getId();
}
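    // Illustrative note (not in the original source): this resolves to
    // "<service path>/static/<project id>", the directory that the staticize, download
    // and delete endpoints below all read from and write to.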
/**
* 静态化接口列表
*/
@RequestMapping("/interfaceList.do")
public ModelAndView interfaceList(HttpServletRequest req, @RequestParam String moduleId, @RequestParam int currentPage,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Module module = moduleCache.get(moduleId);
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-接口");
Map<String, Object> map = Tools.getMap("moduleId", moduleId);
Page page = new Page(15, currentPage);
returnMap.put("page", page);
returnMap.put("interfaceList", InterfaceAdapter.getDto(customInterfaceService.selectByModuleId(moduleId), module));
returnMap.put("activePage",moduleId+"_interface");
returnMap.put("url", module.getId() + "-interfaceList");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/interfaceList.jsp",returnMap);
}
/**
* 静态化模块文章列表
*/
@RequestMapping("/articleList.do")
public ModelAndView staticizeModule(HttpServletRequest req, @RequestParam String moduleId,@RequestParam String category,@RequestParam int currentPage,
String type, String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Module module = moduleCache.get(moduleId);
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-文章");
// 当前类目
if( category.equals(IConst.ALL) ){
category = "";
returnMap.put("md5Category", "");
}else{
returnMap.put("md5Category", MD5.encrytMD5(category, "").substring(0, 10));
}
if(MyString.isEmpty(type)){
type = "ARTICLE";
}
if(type.equals("ARTICLE")){
// 获取所有类目
// 静态化模块文章
List<String> categorys = customModuleService.queryCategoryByModuleId(module.getId());
List<CategoryDto> categoryDtos = new ArrayList<CategoryDto>();
// 文章分类,按类目静态化
for(String c: categorys){
if(MyString.isEmpty(c)){
continue;
}
CategoryDto categoryDto = new CategoryDto();
categoryDto.setMd5Category(MD5.encrytMD5(c, "").substring(0, 10));
categoryDto.setCategory(c);
categoryDtos.add( categoryDto );
}
returnMap.put("categoryDtos", categoryDtos);
returnMap.put("activePage",module.getId()+"_article");
returnMap.put("url", module.getId() + "-articleList-"+returnMap.get("md5Category"));
}else{
returnMap.put("activePage",module.getId()+"_dictionary");
returnMap.put("url", module.getId() + "-dictionary");
}
Map<String, Object> map = Tools.getMap("moduleId", moduleId, "type", type, "category", category);
Page page = new Page(15, currentPage);
List<Article> articleList = customArticleService.queryArticle(moduleId, null, type, category, null, page);
returnMap.put("page", page);
returnMap.put("articleList", articleList);
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/articleList.jsp",returnMap);
}
/**
* 静态化文章
* @param req
* @param articleId
* @return
* @throws MyException
*/
@RequestMapping("/articleDetail.do")
public ModelAndView staticizeArticle(HttpServletRequest req, @RequestParam String articleId,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
ArticleWithBLOBs article = articleService.getById(articleId);
Module module = moduleCache.get(article.getModuleId());
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
if(article.getType().equals(ArticleType.ARTICLE.name())){
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-文章详情");
returnMap.put("article", article);
// 项目备注将静态化成网站的description
returnMap.put("description", article.getBrief());
// 模块名称将静态化成网站标题
returnMap.put("title", article.getName());
returnMap.put("activePage",module.getId()+"_article");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/articleDetail.jsp",returnMap);
}else{
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-数据字典详情");
returnMap.put("article", article);
returnMap.put("activePage",module.getId()+"_dictionary");
returnMap.put("dictionaryFields", JSONArray.toArray(JSONArray.fromObject(article.getContent()), DictionaryDto.class));
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/dictionaryDetail.jsp",returnMap);
}
}
/**
* 静态化接口详情
* @param req
* @return
* @throws MyException
*/
@RequestMapping("/interfaceDetail.do")
public ModelAndView interfaceDetail(HttpServletRequest req, @RequestParam String interfaceId,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
InterfaceWithBLOBs interFace = interfaceService.getById(interfaceId);
Module module = moduleCache.get(interFace.getModuleId());
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-接口详情");
List<InterfacePDFDto> interfaces = new ArrayList<InterfacePDFDto>();
interfaces.add(customInterfaceService.getInterDto(interFace, module, false));
returnMap.put("interfaces", interfaces);
returnMap.put("activePage",module.getId()+"_interface");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/interfaceDetail.jsp",returnMap);
}
/**
* 删除静态化
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/delStaticize.do")
@ResponseBody
public JsonResult delStaticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
Tools.deleteFile(path);
return new JsonResult(1, null );
}
/**
* 下载静态化文件
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/downloadStaticize.do")
@ResponseBody
public JsonResult downloadStaticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
File file = new File(path);
if( !file.exists()){
throw new MyException(MyError.E000057);
}
String webBasePath = req.getScheme()+"://"+req.getServerName()+":"+req.getServerPort() + req.getContextPath() +"/";
Tools.createFile(path + "/downLoad/");
//获取html,提取url,替换线上资源路径,准本下载文件夹
String[] childFilePaths = file.list();
List<String> filePaths = new ArrayList<>();
for(String childFilePath : childFilePaths){
if( !childFilePath.endsWith(".html") ){
continue;
}
String html = Tools.readFile(file.getAbsolutePath() +"/"+ childFilePath);
Tools.getHrefFromText(html, filePaths);
html = html.replaceAll(webBasePath, "");
Tools.staticize(html, path + "/downLoad/" + new File(childFilePath).getName());
}
// 拷贝资源文件
for(String sourcePath:filePaths){
if(sourcePath.startsWith(webBasePath) && !sourcePath.endsWith(".do")){
sourcePath = sourcePath.replace(webBasePath, "").split("\"")[0].split("\\?")[0].trim();
if(sourcePath.endsWith(".do")){
continue;
}
// 创建文件目录
String sourcePathFile = sourcePath.substring(0, sourcePath.lastIndexOf("/"));
Tools.createFile(path + "/downLoad/"+ sourcePathFile.replace(Tools.getServicePath(), ""));
Tools.copyFile(Tools.getServicePath() + sourcePath , path + "/downLoad/"+ sourcePath );
}
}
//压缩
Tools.createZip(path + "/downLoad/", path + "/" + projectId + ".zip");
// 返回下载页面
return new JsonResult(1, webBasePath + "static/"+project.getId() + "/" + projectId + ".zip" );
}
/**
* 静态化
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/staticize.do")
@ResponseBody
public JsonResult staticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
if(MyString.isEmpty(needStaticizes)){
needStaticizes = ",article,";
}else{
needStaticizes = ",article," + needStaticizes + ",";
}
String secretKey = settingCache.get(S_SECRETKEY).getValue();
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
Tools.createFile(path);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
// 静态化错误码// 查询页码
int pageSize = 15;
int totalPage = 0;
if(needStaticizes.indexOf(",error,") >= 0){
int errorSize = customErrorService.countByProjectId(projectId);
// 计算总页数
totalPage = (errorSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/errorList.do?projectId="+projectId
+"¤tPage="+i + "&needStaticizes=" + needStaticizes + "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/errorList-" + i + ".html");
}
}
Map<String, Object> map = new HashMap<>();
ModuleCriteria example = new ModuleCriteria();
example.createCriteria().andProjectIdEqualTo(projectId);
for(Module module : moduleService.selectByExample(example)){
if(needStaticizes.indexOf(",article,") >= 0){
// 静态化模块文章,分类
List<String> categorys = customModuleService.queryCategoryByModuleId(module.getId());
// 文章分类,按类目静态化
for(String category: categorys){
if( MyString.isEmpty( category )){
continue; // 空类目不静态化
}
// 查询页码
int articleSize = customArticleService.countByModuleId(module.getId(), null, "ARTICLE", category, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+"&category="+
category+"¤tPage="+i + "&needStaticizes="+needStaticizes + "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-articleList-"+ MD5.encrytMD5(category, "").substring(0, 10) + "-" + i + ".html");
}
}
// 文章分类,不分类
int articleSize = customArticleService.countByModuleId(module.getId(), null, "ARTICLE", null, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+
"&category="+ IConst.ALL+"¤tPage="+i + "&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-articleList--" + i + ".html");
}
// 静态化文章
for(Article article: customArticleService.queryByModuleIdAndType(module.getId(), "ARTICLE")){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleDetail.do?articleId="+ article.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + article.getId()+".html");
}
}
if(needStaticizes.indexOf(",dictionary,") >= 0){
// 数据字典列表
int articleSize = customArticleService.countByModuleId(module.getId(), null, "DICTIONARY", null, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+
"&category="+ IConst.ALL+"¤tPage="+i+"&type=DICTIONARY" + "&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-dictionaryList-" + i + ".html");
}
// 静态化数据字典详情
for(Article article: customArticleService.queryByModuleIdAndType(module.getId(), "DICTIONARY")){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleDetail.do?articleId="+ article.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + article.getId()+".html");
}
}
if(needStaticizes.indexOf("interface") >= 0){
// 接口列表
// 计算总页数
totalPage = (customInterfaceService.countByModuleId(module.getId())+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/interfaceList.do?moduleId="+ module.getId()+"¤tPage="+i +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-interfaceList-" + i + ".html");
}
// 静态化接口详情
for(Interface inter: customInterfaceService.selectByModuleId(module.getId())){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/interfaceDetail.do?interfaceId="+ inter.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + inter.getId()+".html");
}
}
// 推送给百度
// try{
// if( !config.getBaidu().equals("") )
// HttpPostGet.postBody(config.getBaidu(), config.getDomain()+"/resources/html/staticize/"+project.getId()+"/"+module.getId()+"/list.html", null);
// }catch(Exception e){
// e.printStackTrace();
// }
}
return new JsonResult(1, null );
}
private Map<String, Object> getProjectModuleInfor(Module module, Project project, String typeName) {
// 静态化
Map<String, String> settingMap = new HashMap<>();
for (SettingDto setting : settingCache.getAll()) {
settingMap.put(setting.getKey(), setting.getValue());
}
if(!MyString.isEmpty(project.getCover())){
if(!project.getCover().startsWith("http:") && !project.getCover().startsWith("https:") ){
project.setCover(config.getDomain() +"/"+ project.getCover());
}
}
settingMap.put(IConst.DOMAIN, config.getDomain());
Map<String,Object> returnMap = new HashMap<String,Object>();
returnMap.put("settings", settingMap);
returnMap.put("project", project);
returnMap.put("module", module);
// 将选中的模块放到第一位
List<Module> moduleList = customModuleService.queryByProjectId(project.getId());
if(module != null){
for(Module m:moduleList){
if(m.getId().equals(module.getId())){
moduleList.remove(m);
break;
}
}
moduleList.add(0, module);
}
returnMap.put("moduleList", moduleList);
//returnMap.put("menuList", menuService.getLeftMenu(null));
// 模块将静态化成网站的keywords
returnMap.put("keywords", module!=null ? module.getRemark():project.getRemark());
// 项目备注将静态化成网站的description
returnMap.put("description", project.getRemark());
// 模块名称将静态化成网站标题
returnMap.put("title", module!=null ? module.getName() + typeName: project.getName() + typeName);
return returnMap;
}
}
| api/src/main/java/cn/crap/controller/user/StaticizeController.java | package cn.crap.controller.user;
import cn.crap.adapter.ErrorAdapter;
import cn.crap.adapter.InterfaceAdapter;
import cn.crap.dto.CategoryDto;
import cn.crap.dto.DictionaryDto;
import cn.crap.dto.InterfacePDFDto;
import cn.crap.dto.SettingDto;
import cn.crap.enumer.ArticleType;
import cn.crap.enumer.MyError;
import cn.crap.enumer.ProjectType;
import cn.crap.framework.JsonResult;
import cn.crap.framework.MyException;
import cn.crap.framework.base.BaseController;
import cn.crap.model.mybatis.*;
import cn.crap.model.mybatis.Error;
import cn.crap.service.custom.CustomArticleService;
import cn.crap.service.custom.CustomErrorService;
import cn.crap.service.custom.CustomInterfaceService;
import cn.crap.service.custom.CustomModuleService;
import cn.crap.service.mybatis.ArticleService;
import cn.crap.service.mybatis.InterfaceService;
import cn.crap.service.mybatis.ModuleService;
import cn.crap.beans.Config;
import cn.crap.utils.*;
import net.sf.json.JSONArray;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Controller
@RequestMapping("/user/staticize")
public class StaticizeController extends BaseController{
@Autowired
private CustomInterfaceService customInterfaceService;
@Autowired
private ModuleService moduleService;
@Autowired
private Config config;
@Autowired
private InterfaceService interfaceService;
@Autowired
private CustomErrorService customErrorService;
@Autowired
private ArticleService articleService;
@Autowired
private CustomArticleService customArticleService;
@Autowired
private CustomModuleService customModuleService;
/**
* 静态化错误码列表
*/
@RequestMapping("/errorList.do")
public ModelAndView staticizeError(HttpServletRequest req, @RequestParam String projectId,@RequestParam int currentPage,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Project project = projectCache.get(projectId);
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(null, project, "-错误码");
Page page = new Page(15, currentPage);
List<Error> errorModels = customErrorService.queryByProjectId(projectId, null, null, page);
returnMap.put("page", page);
returnMap.put("errorList", ErrorAdapter.getDto(errorModels));
returnMap.put("activePage","errorList");
returnMap.put("url", "errorList");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/errorList.jsp",returnMap);
}
private String getStaticPath(Project project) {
return Tools.getServicePath() + "static/"+project.getId();
}
/**
* 静态化接口列表
*/
@RequestMapping("/interfaceList.do")
public ModelAndView interfaceList(HttpServletRequest req, @RequestParam String moduleId, @RequestParam int currentPage,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Module module = moduleCache.get(moduleId);
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-接口");
Map<String, Object> map = Tools.getMap("moduleId", moduleId);
Page page = new Page(15, currentPage);
returnMap.put("page", page);
returnMap.put("interfaceList", InterfaceAdapter.getDto(customInterfaceService.selectByModuleId(moduleId), module));
returnMap.put("activePage",moduleId+"_interface");
returnMap.put("url", module.getId() + "-interfaceList");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/interfaceList.jsp",returnMap);
}
/**
* 静态化模块文章列表
*/
@RequestMapping("/articleList.do")
public ModelAndView staticizeModule(HttpServletRequest req, @RequestParam String moduleId,@RequestParam String category,@RequestParam int currentPage,
String type, String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
Module module = moduleCache.get(moduleId);
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-文章");
// 当前类目
if( category.equals(IConst.ALL) ){
category = "";
returnMap.put("md5Category", "");
}else{
returnMap.put("md5Category", MD5.encrytMD5(category, "").substring(0, 10));
}
if(MyString.isEmpty(type)){
type = "ARTICLE";
}
if(type.equals("ARTICLE")){
// 获取所有类目
// 静态化模块文章
List<String> categorys = customModuleService.queryCategoryByModuleId(module.getId());
List<CategoryDto> categoryDtos = new ArrayList<CategoryDto>();
// 文章分类,按类目静态化
for(String c: categorys){
if(MyString.isEmpty(c)){
continue;
}
CategoryDto categoryDto = new CategoryDto();
categoryDto.setMd5Category(MD5.encrytMD5(c, "").substring(0, 10));
categoryDto.setCategory(c);
categoryDtos.add( categoryDto );
}
returnMap.put("categoryDtos", categoryDtos);
returnMap.put("activePage",module.getId()+"_article");
returnMap.put("url", module.getId() + "-articleList-"+returnMap.get("md5Category"));
}else{
returnMap.put("activePage",module.getId()+"_dictionary");
returnMap.put("url", module.getId() + "-dictionary");
}
Map<String, Object> map = Tools.getMap("moduleId", moduleId, "type", type, "category", category);
Page page = new Page(15, currentPage);
List<Article> articleList = customArticleService.queryArticle(moduleId, null, type, category, null, page);
returnMap.put("page", page);
returnMap.put("articleList", articleList);
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/articleList.jsp",returnMap);
}
/**
* 静态化文章
* @param req
* @param articleId
* @return
* @throws MyException
*/
@RequestMapping("/articleDetail.do")
public ModelAndView staticizeArticle(HttpServletRequest req, @RequestParam String articleId,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
ArticleWithBLOBs article = articleService.getById(articleId);
Module module = moduleCache.get(article.getModuleId());
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
if(article.getType().equals(ArticleType.ARTICLE.name())){
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-文章详情");
returnMap.put("article", article);
returnMap.put("activePage",module.getId()+"_article");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/articleDetail.jsp",returnMap);
}else{
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-数据字典详情");
returnMap.put("article", article);
returnMap.put("activePage",module.getId()+"_dictionary");
returnMap.put("dictionaryFields", JSONArray.toArray(JSONArray.fromObject(article.getContent()), DictionaryDto.class));
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/dictionaryDetail.jsp",returnMap);
}
}
/**
* 静态化接口详情
* @param req
* @return
* @throws MyException
*/
@RequestMapping("/interfaceDetail.do")
public ModelAndView interfaceDetail(HttpServletRequest req, @RequestParam String interfaceId,
String needStaticizes, @RequestParam String secretKey) throws MyException {
// 验证是否是非法请求
if( !settingCache.get(S_SECRETKEY).getValue().equals(secretKey) ){
throw new MyException(MyError.E000056);
}
InterfaceWithBLOBs interFace = interfaceService.getById(interfaceId);
Module module = moduleCache.get(interFace.getModuleId());
Project project = projectCache.get(module.getProjectId());
String path = getStaticPath(project);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-接口详情");
List<InterfacePDFDto> interfaces = new ArrayList<InterfacePDFDto>();
interfaces.add(customInterfaceService.getInterDto(interFace, module, false));
returnMap.put("interfaces", interfaces);
returnMap.put("activePage",module.getId()+"_interface");
returnMap.put("needStaticizes", needStaticizes);
return new ModelAndView("WEB-INF/views/staticize/default/interfaceDetail.jsp",returnMap);
}
/**
* 删除静态化
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/delStaticize.do")
@ResponseBody
public JsonResult delStaticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
Tools.deleteFile(path);
return new JsonResult(1, null );
}
/**
* 下载静态化文件
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/downloadStaticize.do")
@ResponseBody
public JsonResult downloadStaticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
File file = new File(path);
if( !file.exists()){
throw new MyException(MyError.E000057);
}
String webBasePath = req.getScheme()+"://"+req.getServerName()+":"+req.getServerPort() + req.getContextPath() +"/";
Tools.createFile(path + "/downLoad/");
//获取html,提取url,替换线上资源路径,准本下载文件夹
String[] childFilePaths = file.list();
List<String> filePaths = new ArrayList<>();
for(String childFilePath : childFilePaths){
if( !childFilePath.endsWith(".html") ){
continue;
}
String html = Tools.readFile(file.getAbsolutePath() +"/"+ childFilePath);
Tools.getHrefFromText(html, filePaths);
html = html.replaceAll(webBasePath, "");
Tools.staticize(html, path + "/downLoad/" + new File(childFilePath).getName());
}
// 拷贝资源文件
for(String sourcePath:filePaths){
if(sourcePath.startsWith(webBasePath) && !sourcePath.endsWith(".do")){
sourcePath = sourcePath.replace(webBasePath, "").split("\"")[0].split("\\?")[0].trim();
if(sourcePath.endsWith(".do")){
continue;
}
// 创建文件目录
String sourcePathFile = sourcePath.substring(0, sourcePath.lastIndexOf("/"));
Tools.createFile(path + "/downLoad/"+ sourcePathFile.replace(Tools.getServicePath(), ""));
Tools.copyFile(Tools.getServicePath() + sourcePath , path + "/downLoad/"+ sourcePath );
}
}
//压缩
Tools.createZip(path + "/downLoad/", path + "/" + projectId + ".zip");
// 返回下载页面
return new JsonResult(1, webBasePath + "static/"+project.getId() + "/" + projectId + ".zip" );
}
/**
* 静态化
* @throws Exception
* @throws UnsupportedEncodingException
*/
@RequestMapping("/staticize.do")
@ResponseBody
public JsonResult staticize(HttpServletRequest req, @RequestParam String projectId, String needStaticizes) throws UnsupportedEncodingException, Exception {
if(MyString.isEmpty(needStaticizes)){
needStaticizes = ",article,";
}else{
needStaticizes = ",article," + needStaticizes + ",";
}
String secretKey = settingCache.get(S_SECRETKEY).getValue();
Project project = projectCache.get(projectId);
checkUserPermissionByProject(project);
String path = getStaticPath(project);
Tools.createFile(path);
if(project.getType() != ProjectType.PUBLIC.getType()){
Tools.deleteFile(path);
// 删除旧的静态化文件
throw new MyException(MyError.E000044);
}
// 静态化错误码// 查询页码
int pageSize = 15;
int totalPage = 0;
if(needStaticizes.indexOf(",error,") >= 0){
int errorSize = customErrorService.countByProjectId(projectId);
// 计算总页数
totalPage = (errorSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/errorList.do?projectId="+projectId
+"¤tPage="+i + "&needStaticizes=" + needStaticizes + "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/errorList-" + i + ".html");
}
}
Map<String, Object> map = new HashMap<>();
ModuleCriteria example = new ModuleCriteria();
example.createCriteria().andProjectIdEqualTo(projectId);
for(Module module : moduleService.selectByExample(example)){
if(needStaticizes.indexOf(",article,") >= 0){
// 静态化模块文章,分类
List<String> categorys = customModuleService.queryCategoryByModuleId(module.getId());
// 文章分类,按类目静态化
for(String category: categorys){
if( MyString.isEmpty( category )){
continue; // 空类目不静态化
}
// 查询页码
int articleSize = customArticleService.countByModuleId(module.getId(), null, "ARTICLE", category, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+"&category="+
category+"¤tPage="+i + "&needStaticizes="+needStaticizes + "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-articleList-"+ MD5.encrytMD5(category, "").substring(0, 10) + "-" + i + ".html");
}
}
// 文章分类,不分类
int articleSize = customArticleService.countByModuleId(module.getId(), null, "ARTICLE", null, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+
"&category="+ IConst.ALL+"¤tPage="+i + "&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-articleList--" + i + ".html");
}
// 静态化文章
for(Article article: customArticleService.queryByModuleIdAndType(module.getId(), "ARTICLE")){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleDetail.do?articleId="+ article.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + article.getId()+".html");
}
}
if(needStaticizes.indexOf(",dictionary,") >= 0){
// 数据字典列表
int articleSize = customArticleService.countByModuleId(module.getId(), null, "DICTIONARY", null, null);
// 计算总页数
totalPage = (articleSize+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleList.do?moduleId="+ module.getId()+
"&category="+ IConst.ALL+"¤tPage="+i+"&type=DICTIONARY" + "&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-dictionaryList-" + i + ".html");
}
// 静态化数据字典详情
for(Article article: customArticleService.queryByModuleIdAndType(module.getId(), "DICTIONARY")){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/articleDetail.do?articleId="+ article.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + article.getId()+".html");
}
}
if(needStaticizes.indexOf("interface") >= 0){
// 接口列表
// 计算总页数
totalPage = (customInterfaceService.countByModuleId(module.getId())+pageSize-1)/pageSize;
if(totalPage == 0){
totalPage = 1;
}
for(int i=1 ; i<= totalPage; i++){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/interfaceList.do?moduleId="+ module.getId()+"¤tPage="+i +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
// list-类目摘要-页码
Tools.staticize(html, path + "/" + module.getId() +"-interfaceList-" + i + ".html");
}
// 静态化接口详情
for(Interface inter: customInterfaceService.selectByModuleId(module.getId())){
String html = HttpPostGet.get(config.getDomain()+ "/user/staticize/interfaceDetail.do?interfaceId="+ inter.getId() +
"&needStaticizes="+needStaticizes+ "&secretKey=" + secretKey, null, null, 10 * 1000);
Tools.staticize(html, path + "/" + inter.getId()+".html");
}
}
// 推送给百度
// try{
// if( !config.getBaidu().equals("") )
// HttpPostGet.postBody(config.getBaidu(), config.getDomain()+"/resources/html/staticize/"+project.getId()+"/"+module.getId()+"/list.html", null);
// }catch(Exception e){
// e.printStackTrace();
// }
}
return new JsonResult(1, null );
}
private Map<String, Object> getProjectModuleInfor(Module module, Project project, String typeName) {
// 静态化
Map<String, String> settingMap = new HashMap<String, String>();
for (SettingDto setting : settingCache.getAll()) {
settingMap.put(setting.getKey(), setting.getValue());
}
if(!MyString.isEmpty(project.getCover())){
if(!project.getCover().startsWith("http:") && !project.getCover().startsWith("https:") ){
project.setCover(config.getDomain() +"/"+ project.getCover());
}
}
settingMap.put(IConst.DOMAIN, config.getDomain());
Map<String,Object> returnMap = new HashMap<String,Object>();
returnMap.put("settings", settingMap);
returnMap.put("project", project);
returnMap.put("module", module);
// 将选中的模块放到第一位
List<Module> moduleList = customModuleService.queryByProjectId(project.getId());
if(module != null){
for(Module m:moduleList){
if(m.getId().equals(module.getId())){
moduleList.remove(m);
break;
}
}
moduleList.add(0, module);
}
returnMap.put("moduleList", moduleList);
//returnMap.put("menuList", menuService.getLeftMenu(null));
// 模块将静态化成网站的keywords
returnMap.put("keywords", module!=null ? module.getRemark():project.getRemark());
// 项目备注将静态化成网站的description
returnMap.put("description", project.getRemark());
// 模块名称将静态化成网站标题
returnMap.put("title", module!=null ? module.getName() + typeName: project.getName() + typeName);
return returnMap;
}
}
| Staticize: update article title
| api/src/main/java/cn/crap/controller/user/StaticizeController.java | Staticize: update article title | <ide><path>pi/src/main/java/cn/crap/controller/user/StaticizeController.java
<ide> if(article.getType().equals(ArticleType.ARTICLE.name())){
<ide> Map<String, Object> returnMap = getProjectModuleInfor(module, project, "-文章详情");
<ide> returnMap.put("article", article);
<add> // 项目备注将静态化成网站的description
<add> returnMap.put("description", article.getBrief());
<add> // 模块名称将静态化成网站标题
<add> returnMap.put("title", article.getName());
<add>
<ide> returnMap.put("activePage",module.getId()+"_article");
<ide> returnMap.put("needStaticizes", needStaticizes);
<ide> return new ModelAndView("WEB-INF/views/staticize/default/articleDetail.jsp",returnMap);
<ide>
<ide> private Map<String, Object> getProjectModuleInfor(Module module, Project project, String typeName) {
<ide> // 静态化
<del> Map<String, String> settingMap = new HashMap<String, String>();
<add> Map<String, String> settingMap = new HashMap<>();
<ide> for (SettingDto setting : settingCache.getAll()) {
<ide> settingMap.put(setting.getKey(), setting.getValue());
<ide> } |
|
Java | bsd-3-clause | 1627262ecf84d6e2a0b644c4393f7c220c70423c | 0 | joansmith/tripleplay,tomfisher/tripleplay,tomfisher/tripleplay,tomfisher/tripleplay,joansmith/tripleplay,joansmith/tripleplay,joansmith/tripleplay,tomfisher/tripleplay,joansmith/tripleplay,tomfisher/tripleplay | //
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2012, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.util;
import playn.core.Color;
/**
* Utilities and constants for colors.
*/
public class Colors
{
/** Named versions of commonly used colors. */
public final static int
WHITE = Color.rgb(255, 255, 255),
LIGHT_GRAY = Color.rgb(192, 192, 192),
GRAY = Color.rgb(128, 128, 128),
DARK_GRAY = Color.rgb(64, 64, 64),
BLACK = Color.rgb(0, 0, 0),
RED = Color.rgb(255, 0, 0),
PINK = Color.rgb(255, 175, 175),
ORANGE = Color.rgb(255, 200, 0),
YELLOW = Color.rgb(255, 255, 0),
GREEN = Color.rgb(0, 255, 0),
MAGENTA = Color.rgb(255, 0, 255),
CYAN = Color.rgb(0, 255, 255),
BLUE = Color.rgb(0, 0, 255);
/**
* Blends two colors.
* @return a color halfway between the two colors.
*/
public static int blend (int c1, int c2) {
return Color.rgb((Color.red(c1) + Color.red(c2)) >> 1,
(Color.green(c1) + Color.green(c2)) >> 1,
(Color.blue(c1) + Color.blue(c2)) >> 1);
}
/**
* Blends two colors proportionally.
* @param p1 The percentage of the first color to use, from 0.0f to 1.0f inclusive.
*/
public static int blend (int c1, int c2, float p1) {
float p2 = 1 - p1;
return Color.rgb((int)(Color.red(c1) * p1 + Color.red(c2) * p2),
(int)(Color.green(c1) * p1 + Color.green(c2) * p2),
(int)(Color.blue(c1) * p1 + Color.blue(c2) * p2));
}
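    // Illustrative example (not in the original source): blend(RED, BLUE, 0.25f) takes
    // 25% of the first color's channels and 75% of the second's, yielding roughly
    // Color.rgb(63, 0, 191).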
/**
* Creates a new darkened version of the given color. This is implemented by composing a new
* color consisting of the components of the original color, each multiplied by 70%. The alpha
* channel is copied from the original.
*/
public static int darker (int color) {
return Color.argb(Color.alpha(color),
Math.max((int)(Color.red(color) * DARK_FACTOR), 0),
Math.max((int)(Color.green(color) * DARK_FACTOR), 0),
Math.max((int)(Color.blue(color) * DARK_FACTOR), 0));
}
/**
* Creates a new brightened version of the given color. This is implemented by composing a new
* color consisting of the components of the original color, each multiplied by 10/7, with
* exceptions for zero-valued components. The alpha channel is copied from the original.
*/
public static int brighter (int color) {
int a = Color.alpha(color);
int r = Color.red(color), g = Color.green(color), b = Color.blue(color);
        // black is a special case that just goes to dark gray
if (r == 0 && g == 0 && b == 0) return Color.argb(a, MIN_BRIGHT, MIN_BRIGHT, MIN_BRIGHT);
        // bump each component up to the minimum, unless it is absent
if (r != 0) r = Math.max(MIN_BRIGHT, r);
if (g != 0) g = Math.max(MIN_BRIGHT, g);
if (b != 0) b = Math.max(MIN_BRIGHT, b);
// scale
return Color.argb(a,
Math.min((int)(r*BRIGHT_FACTOR), 255),
Math.min((int)(g*BRIGHT_FACTOR), 255),
Math.min((int)(b*BRIGHT_FACTOR), 255));
}
private static final float DARK_FACTOR = 0.7f;
private static final float BRIGHT_FACTOR = 1/DARK_FACTOR;
private static final int MIN_BRIGHT = 3; // (int)(1.0 / (1.0 - DARK_FACTOR));
}
| core/src/main/java/tripleplay/util/Colors.java | //
// Triple Play - utilities for use in PlayN-based games
// Copyright (c) 2011-2012, Three Rings Design, Inc. - All rights reserved.
// http://github.com/threerings/tripleplay/blob/master/LICENSE
package tripleplay.util;
import playn.core.Color;
/**
* Utilities and constants for colors.
*/
public class Colors
{
/** Named versions of commonly used colors. */
public final static int
WHITE = Color.rgb(255, 255, 255),
LIGHT_GRAY = Color.rgb(192, 192, 192),
GRAY = Color.rgb(128, 128, 128),
DARK_GRAY = Color.rgb(64, 64, 64),
BLACK = Color.rgb(0, 0, 0),
RED = Color.rgb(255, 0, 0),
PINK = Color.rgb(255, 175, 175),
ORANGE = Color.rgb(255, 200, 0),
YELLOW = Color.rgb(255, 255, 0),
GREEN = Color.rgb(0, 255, 0),
MAGENTA = Color.rgb(255, 0, 255),
CYAN = Color.rgb(0, 255, 255),
BLUE = Color.rgb(0, 0, 255);
/**
* Blends the two supplied colors.
*
* @return a color halfway between the two colors.
*/
public static int blend (int c1, int c2) {
return Color.rgb((Color.red(c1) + Color.red(c2)) >> 1,
(Color.green(c1) + Color.green(c2)) >> 1,
(Color.blue(c1) + Color.blue(c2)) >> 1);
}
/**
* Blends the two supplied colors, using the supplied percentage
* as the amount of the first color to use.
*
* @param firstperc The percentage of the first color to use, from 0.0f
* to 1.0f inclusive.
*/
public static int blend (int c1, int c2, float firstperc) {
float p2 = 1.0f - firstperc;
return Color.rgb((int)(Color.red(c1) * firstperc + Color.red(c2) * p2),
(int)(Color.green(c1) * firstperc + Color.green(c2) * p2),
(int)(Color.blue(c1) * firstperc + Color.blue(c2) * p2));
}
/**
* Creates a new darkened version of the given color. This is implemented by composing a new
* color consisting of the components of the original color, each multiplied by 70%. The alpha
* channel is copied from the original.
*/
public static int darker (int color) {
return Color.argb(Color.alpha(color),
Math.max((int)(Color.red(color) * DARK_FACTOR), 0),
Math.max((int)(Color.green(color) * DARK_FACTOR), 0),
Math.max((int)(Color.blue(color) * DARK_FACTOR), 0));
}
/**
* Creates a new brightened version of the given color. This is implemented by composing a new
* color consisting of the components of the original color, each multiplied by 10/7, with
* exceptions for zero-valued components. The alpha channel is copied from the original.
*/
public static int brighter (int color) {
int a = Color.alpha(color);
int r = Color.red(color), g = Color.green(color), b = Color.blue(color);
// black is a special case the just goes to dark gray
if (r == 0 && g == 0 && b == 0) return Color.argb(a, MIN_BRIGHT, MIN_BRIGHT, MIN_BRIGHT);
// bump each component up to the minumum, unless it is absent
if (r != 0) r = Math.max(MIN_BRIGHT, r);
if (g != 0) g = Math.max(MIN_BRIGHT, g);
if (b != 0) b = Math.max(MIN_BRIGHT, b);
// scale
return Color.argb(a,
Math.min((int)(r*BRIGHT_FACTOR), 255),
Math.min((int)(g*BRIGHT_FACTOR), 255),
Math.min((int)(b*BRIGHT_FACTOR), 255));
}
private static final float DARK_FACTOR = 0.7f;
private static final float BRIGHT_FACTOR = 1/DARK_FACTOR;
private static final int MIN_BRIGHT = 3; // (int)(1.0 / (1.0 - DARK_FACTOR));
}
| Tidy up blend methods
| core/src/main/java/tripleplay/util/Colors.java | Tidy up blend methods | <ide><path>ore/src/main/java/tripleplay/util/Colors.java
<ide> BLUE = Color.rgb(0, 0, 255);
<ide>
<ide> /**
<del> * Blends the two supplied colors.
<del> *
<add> * Blends two colors.
<ide> * @return a color halfway between the two colors.
<ide> */
<ide> public static int blend (int c1, int c2) {
<ide> }
<ide>
<ide> /**
<del> * Blends the two supplied colors, using the supplied percentage
<del> * as the amount of the first color to use.
<del> *
<del> * @param firstperc The percentage of the first color to use, from 0.0f
<del> * to 1.0f inclusive.
<add> * Blends two colors proportionally.
<add> * @param p1 The percentage of the first color to use, from 0.0f to 1.0f inclusive.
<ide> */
<del> public static int blend (int c1, int c2, float firstperc) {
<del> float p2 = 1.0f - firstperc;
<del> return Color.rgb((int)(Color.red(c1) * firstperc + Color.red(c2) * p2),
<del> (int)(Color.green(c1) * firstperc + Color.green(c2) * p2),
<del> (int)(Color.blue(c1) * firstperc + Color.blue(c2) * p2));
<add> public static int blend (int c1, int c2, float p1) {
<add> float p2 = 1 - p1;
<add> return Color.rgb((int)(Color.red(c1) * p1 + Color.red(c2) * p2),
<add> (int)(Color.green(c1) * p1 + Color.green(c2) * p2),
<add> (int)(Color.blue(c1) * p1 + Color.blue(c2) * p2));
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | fd82129cc5dbf961d0526c3f2646199d464ed7d8 | 0 | sangramjadhav/testrs | 226a482e-2ece-11e5-905b-74de2bd44bed | hello.java | 2269ad9c-2ece-11e5-905b-74de2bd44bed | 226a482e-2ece-11e5-905b-74de2bd44bed | hello.java | 226a482e-2ece-11e5-905b-74de2bd44bed | <ide><path>ello.java
<del>2269ad9c-2ece-11e5-905b-74de2bd44bed
<add>226a482e-2ece-11e5-905b-74de2bd44bed |
|
Java | mit | 4031a97ac6f8b34c7bff24ca07b17acf4a835cef | 0 | chargebee/chargebee-java | /*
* Copyright (c) 2012 chargebee.com
* All Rights Reserved.
*/
package com.chargebee.models;
import com.chargebee.*;
import com.chargebee.internal.*;
import com.chargebee.internal.HttpUtil.Method;
import com.chargebee.models.enums.*;
import org.json.*;
import java.io.*;
import java.sql.Timestamp;
import java.util.*;
public class Transaction extends Resource<Transaction> {
public enum Type {
AUTHORIZE,
PAYMENT,
REFUND,
_UNKNOWN; /*Indicates unexpected value for this enum. You can get this when there is a
java-client version incompatibility. We suggest you to upgrade to the latest version */
}
public enum Status {
SUCCESS,
VOIDED,
FAILURE,
_UNKNOWN; /*Indicates unexpected value for this enum. You can get this when there is a
java-client version incompatibility. We suggest you to upgrade to the latest version */
}
//Constructors
//============
public Transaction(String jsonStr) {
super(jsonStr);
}
public Transaction(JSONObject jsonObj) {
super(jsonObj);
}
// Fields
//=======
public String id() {
return reqString("id");
}
public String subscriptionId() {
return reqString("subscription_id");
}
public Gateway gateway() {
return reqEnum("gateway", Gateway.class);
}
public String invoiceId() {
return reqString("invoice_id");
}
public Type type() {
return reqEnum("type", Type.class);
}
public Timestamp date() {
return optTimestamp("date");
}
public Integer amount() {
return optInteger("amount");
}
public String idAtGateway() {
return optString("id_at_gateway");
}
public String maskedCardNumber() {
return reqString("masked_card_number");
}
public String errorCode() {
return optString("error_code");
}
public String errorText() {
return optString("error_text");
}
public Long refundedTxId() {
return optLong("refunded_tx_id");
}
public String refundMemo() {
return optString("refund_memo");
}
public Timestamp voidedAt() {
return optTimestamp("voided_at");
}
public Status status() {
return reqEnum("status", Status.class);
}
// Operations
//===========
public static ListRequest list() throws IOException {
String url = url("transactions");
return new ListRequest(url);
}
public static Request retrieve(String id) throws IOException {
String url = url("transactions", nullCheck(id));
return new Request(Method.GET, url);
}
public static ListRequest transactionsForSubscription(String id) throws IOException {
String url = url("subscriptions", nullCheck(id), "transactions");
return new ListRequest(url);
}
}
| src/main/java/com/chargebee/models/Transaction.java | /*
* Copyright (c) 2012 chargebee.com
* All Rights Reserved.
*/
package com.chargebee.models;
import com.chargebee.*;
import com.chargebee.internal.*;
import com.chargebee.internal.HttpUtil.Method;
import com.chargebee.models.enums.*;
import org.json.*;
import java.io.*;
import java.sql.Timestamp;
import java.util.*;
public class Transaction extends Resource<Transaction> {
public enum Type {
AUTHORIZE,
PAYMENT,
REFUND,
_UNKNOWN; /*Indicates unexpected value for this enum. You can get this when there is a
java-client version incompatibility. We suggest you to upgrade to the latest version */
}
public enum Status {
SUCCESS,
VOIDED,
FAILURE,
_UNKNOWN; /*Indicates unexpected value for this enum. You can get this when there is a
java-client version incompatibility. We suggest you to upgrade to the latest version */
}
//Constructors
//============
public Transaction(String jsonStr) {
super(jsonStr);
}
public Transaction(JSONObject jsonObj) {
super(jsonObj);
}
// Fields
//=======
public String id() {
return reqString("id");
}
public String subscriptionId() {
return reqString("subscription_id");
}
public Gateway gateway() {
return reqEnum("gateway", Gateway.class);
}
public Long invoiceId() {
return reqLong("invoice_id");
}
public Type type() {
return reqEnum("type", Type.class);
}
public Timestamp date() {
return optTimestamp("date");
}
public Integer amount() {
return optInteger("amount");
}
public String idAtGateway() {
return optString("id_at_gateway");
}
public String maskedCardNumber() {
return reqString("masked_card_number");
}
public String errorCode() {
return optString("error_code");
}
public String errorText() {
return optString("error_text");
}
public Long refundedTxId() {
return optLong("refunded_tx_id");
}
public String refundMemo() {
return optString("refund_memo");
}
public Timestamp voidedAt() {
return optTimestamp("voided_at");
}
public Status status() {
return reqEnum("status", Status.class);
}
// Operations
//===========
public static ListRequest list() throws IOException {
String url = url("transactions");
return new ListRequest(url);
}
public static Request retrieve(String id) throws IOException {
String url = url("transactions", nullCheck(id));
return new Request(Method.GET, url);
}
public static ListRequest transactionsForSubscription(String id) throws IOException {
String url = url("subscriptions", nullCheck(id), "transactions");
return new ListRequest(url);
}
}
| changes | src/main/java/com/chargebee/models/Transaction.java | changes | <ide><path>rc/main/java/com/chargebee/models/Transaction.java
<ide> return reqEnum("gateway", Gateway.class);
<ide> }
<ide>
<del> public Long invoiceId() {
<del> return reqLong("invoice_id");
<add> public String invoiceId() {
<add> return reqString("invoice_id");
<ide> }
<ide>
<ide> public Type type() { |
|
Java | lgpl-2.1 | 5adacc3b336bacbe30aa06175ef80c3aac08a62a | 0 | MinecraftForge/FML | /*
* Forge Mod Loader
* Copyright (c) 2012-2013 cpw.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors:
* cpw - implementation
*/
package cpw.mods.fml.common.network.internal;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.Level;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.SetMultimap;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.ModContainer;
import cpw.mods.fml.common.discovery.ASMDataTable;
import cpw.mods.fml.common.discovery.ASMDataTable.ASMData;
import cpw.mods.fml.common.network.NetworkCheckHandler;
import cpw.mods.fml.common.versioning.DefaultArtifactVersion;
import cpw.mods.fml.common.versioning.InvalidVersionSpecificationException;
import cpw.mods.fml.common.versioning.VersionRange;
import cpw.mods.fml.relauncher.Side;
public class NetworkModHolder
{
private abstract class NetworkChecker {
public abstract boolean check(Map<String,String> remoteVersions, Side side);
}
private class IgnoredChecker extends NetworkChecker {
@Override
public boolean check(Map<String, String> remoteVersions, Side side)
{
return true;
}
}
private class DefaultNetworkChecker extends NetworkChecker {
@Override
public boolean check(Map<String,String> remoteVersions, Side side)
{
return remoteVersions.containsKey(container.getModId()) ? acceptVersion(remoteVersions.get(container.getModId())) : false;
}
}
private class MethodNetworkChecker extends NetworkChecker {
@Override
public boolean check(Map<String,String> remoteVersions, Side side)
{
try
{
return (Boolean) checkHandler.invoke(container.getMod(), remoteVersions, side);
}
catch (Exception e)
{
FMLLog.log(Level.ERROR, e, "Error occurred invoking NetworkCheckHandler %s at %s", checkHandler.getName(), container);
return false;
}
}
}
private static int assignedIds = 1;
private int localId;
private int networkId;
private ModContainer container;
private Method checkHandler;
private VersionRange acceptableRange;
private NetworkChecker checker;
public NetworkModHolder(ModContainer container)
{
this.container = container;
this.localId = assignedIds++;
this.networkId = this.localId;
}
public NetworkModHolder(ModContainer container, Class<?> modClass, String acceptableVersionRange, ASMDataTable table)
{
this(container);
SetMultimap<String, ASMData> annotationTable = table.getAnnotationsFor(container);
Set<ASMData> versionCheckHandlers;
if (annotationTable != null)
{
versionCheckHandlers = annotationTable.get(NetworkCheckHandler.class.getName());
}
else
{
versionCheckHandlers = ImmutableSet.of();
}
String networkCheckHandlerMethod = null;
for (ASMData vch : versionCheckHandlers)
{
if (vch.getClassName().equals(modClass.getName()))
{
networkCheckHandlerMethod = vch.getObjectName();
networkCheckHandlerMethod = networkCheckHandlerMethod.substring(0,networkCheckHandlerMethod.indexOf('('));
break;
}
}
if (versionCheckHandlers.isEmpty())
{
for (Method m : modClass.getMethods())
{
if (m.isAnnotationPresent(NetworkCheckHandler.class))
{
if (m.getParameterTypes().length == 2 && m.getParameterTypes()[0].equals(Map.class) && m.getParameterTypes()[1].equals(Side.class))
{
this.checkHandler = m;
break;
}
else
{
FMLLog.severe("Found unexpected method signature for annotation NetworkCheckHandler");
}
}
}
}
if (networkCheckHandlerMethod != null)
{
try
{
Method checkHandlerMethod = modClass.getDeclaredMethod(networkCheckHandlerMethod, Map.class, Side.class);
if (checkHandlerMethod.isAnnotationPresent(NetworkCheckHandler.class))
{
this.checkHandler = checkHandlerMethod;
}
}
catch (Exception e)
{
FMLLog.log(Level.WARN, e, "The declared version check handler method %s on network mod id %s is not accessible", networkCheckHandlerMethod, container.getModId());
}
}
if (this.checkHandler != null)
{
this.checker = new MethodNetworkChecker();
} else if (!Strings.isNullOrEmpty(acceptableVersionRange) && !acceptableVersionRange.equals('*'))
{
try
{
this.acceptableRange = VersionRange.createFromVersionSpec(acceptableVersionRange);
}
catch (InvalidVersionSpecificationException e)
{
FMLLog.log(Level.WARN, e, "Invalid bounded range %s specified for network mod id %s", acceptableVersionRange, container.getModId());
}
this.checker = new DefaultNetworkChecker();
} else {
this.checker = new IgnoredChecker();
}
FMLLog.finer("Testing mod %s to verify it accepts its own version in a remote connection", container.getModId());
boolean acceptsSelf = acceptVersion(container.getVersion());
if (!acceptsSelf)
{
FMLLog.severe("The mod %s appears to reject its own version number (%s) in its version handling. This is likely a severe bug in the mod!", container.getModId(), container.getVersion());
}
else
{
FMLLog.finer("The mod %s accepts its own version (%s)", container.getModId(), container.getVersion());
}
}
public boolean acceptVersion(String version)
{
if (acceptableRange!=null)
{
return acceptableRange.containsVersion(new DefaultArtifactVersion(version));
}
return container.getVersion().equals(version);
}
public boolean check(Map<String,String> data, Side side)
{
return checker.check(data, side);
}
public int getLocalId()
{
return localId;
}
public int getNetworkId()
{
return networkId;
}
public ModContainer getContainer()
{
return container;
}
public void setNetworkId(int value)
{
this.networkId = value;
}
}
| src/main/java/cpw/mods/fml/common/network/internal/NetworkModHolder.java | /*
* Forge Mod Loader
* Copyright (c) 2012-2013 cpw.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors:
* cpw - implementation
*/
package cpw.mods.fml.common.network.internal;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.Level;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.SetMultimap;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.ModContainer;
import cpw.mods.fml.common.discovery.ASMDataTable;
import cpw.mods.fml.common.discovery.ASMDataTable.ASMData;
import cpw.mods.fml.common.network.NetworkCheckHandler;
import cpw.mods.fml.common.versioning.DefaultArtifactVersion;
import cpw.mods.fml.common.versioning.InvalidVersionSpecificationException;
import cpw.mods.fml.common.versioning.VersionRange;
import cpw.mods.fml.relauncher.Side;
public class NetworkModHolder
{
private abstract class NetworkChecker {
public abstract boolean check(Map<String,String> remoteVersions, Side side);
}
private class IgnoredChecker extends NetworkChecker {
@Override
public boolean check(Map<String, String> remoteVersions, Side side)
{
return true;
}
}
private class DefaultNetworkChecker extends NetworkChecker {
@Override
public boolean check(Map<String,String> remoteVersions, Side side)
{
return remoteVersions.containsKey(container.getModId()) ? acceptVersion(remoteVersions.get(container.getModId())) : false;
}
}
private class MethodNetworkChecker extends NetworkChecker {
@Override
public boolean check(Map<String,String> remoteVersions, Side side)
{
try
{
return (Boolean) checkHandler.invoke(container, remoteVersions, side);
}
catch (Exception e)
{
FMLLog.log(Level.ERROR, e, "Error occurred invoking NetworkCheckHandler %s at %s", checkHandler.getName(), container);
return false;
}
}
}
private static int assignedIds = 1;
private int localId;
private int networkId;
private ModContainer container;
private Method checkHandler;
private VersionRange acceptableRange;
private NetworkChecker checker;
public NetworkModHolder(ModContainer container)
{
this.container = container;
this.localId = assignedIds++;
this.networkId = this.localId;
}
public NetworkModHolder(ModContainer container, Class<?> modClass, String acceptableVersionRange, ASMDataTable table)
{
this(container);
SetMultimap<String, ASMData> annotationTable = table.getAnnotationsFor(container);
Set<ASMData> versionCheckHandlers;
if (annotationTable != null)
{
versionCheckHandlers = annotationTable.get(NetworkCheckHandler.class.getName());
}
else
{
versionCheckHandlers = ImmutableSet.of();
}
String networkCheckHandlerMethod = null;
for (ASMData vch : versionCheckHandlers)
{
if (vch.getClassName().equals(modClass.getName()))
{
networkCheckHandlerMethod = vch.getObjectName();
networkCheckHandlerMethod = networkCheckHandlerMethod.substring(0,networkCheckHandlerMethod.indexOf('('));
break;
}
}
if (versionCheckHandlers.isEmpty())
{
for (Method m : modClass.getMethods())
{
if (m.isAnnotationPresent(NetworkCheckHandler.class))
{
if (m.getParameterTypes().length == 2 && m.getParameterTypes()[0].equals(Map.class) && m.getParameterTypes()[1].equals(Side.class))
{
this.checkHandler = m;
break;
}
else
{
FMLLog.severe("Found unexpected method signature for annotation NetworkCheckHandler");
}
}
}
}
if (networkCheckHandlerMethod != null)
{
try
{
Method checkHandlerMethod = modClass.getDeclaredMethod(networkCheckHandlerMethod, Map.class, Side.class);
if (checkHandlerMethod.isAnnotationPresent(NetworkCheckHandler.class))
{
this.checkHandler = checkHandlerMethod;
}
}
catch (Exception e)
{
FMLLog.log(Level.WARN, e, "The declared version check handler method %s on network mod id %s is not accessible", networkCheckHandlerMethod, container.getModId());
}
}
if (this.checkHandler != null)
{
this.checker = new MethodNetworkChecker();
} else if (!Strings.isNullOrEmpty(acceptableVersionRange) && !acceptableVersionRange.equals('*'))
{
try
{
this.acceptableRange = VersionRange.createFromVersionSpec(acceptableVersionRange);
}
catch (InvalidVersionSpecificationException e)
{
FMLLog.log(Level.WARN, e, "Invalid bounded range %s specified for network mod id %s", acceptableVersionRange, container.getModId());
}
this.checker = new DefaultNetworkChecker();
} else {
this.checker = new IgnoredChecker();
}
FMLLog.finer("Testing mod %s to verify it accepts its own version in a remote connection", container.getModId());
boolean acceptsSelf = acceptVersion(container.getVersion());
if (!acceptsSelf)
{
FMLLog.severe("The mod %s appears to reject its own version number (%s) in its version handling. This is likely a severe bug in the mod!", container.getModId(), container.getVersion());
}
else
{
FMLLog.finer("The mod %s accepts its own version (%s)", container.getModId(), container.getVersion());
}
}
public boolean acceptVersion(String version)
{
if (acceptableRange!=null)
{
return acceptableRange.containsVersion(new DefaultArtifactVersion(version));
}
return container.getVersion().equals(version);
}
public boolean check(Map<String,String> data, Side side)
{
return checker.check(data, side);
}
public int getLocalId()
{
return localId;
}
public int getNetworkId()
{
return networkId;
}
public ModContainer getContainer()
{
return container;
}
public void setNetworkId(int value)
{
this.networkId = value;
}
}
| Check the mod, not it's container, in the check handler. Closes #358
| src/main/java/cpw/mods/fml/common/network/internal/NetworkModHolder.java | Check the mod, not it's container, in the check handler. Closes #358 | <ide><path>rc/main/java/cpw/mods/fml/common/network/internal/NetworkModHolder.java
<ide> {
<ide> try
<ide> {
<del> return (Boolean) checkHandler.invoke(container, remoteVersions, side);
<add> return (Boolean) checkHandler.invoke(container.getMod(), remoteVersions, side);
<ide> }
<ide> catch (Exception e)
<ide> { |
|
Java | apache-2.0 | 27770369091fd063503ccea4cca6a815a03ca79f | 0 | tsmgeek/traccar,renaudallard/traccar,joseant/traccar-1,duke2906/traccar,orcoliver/traccar,tananaev/traccar,al3x1s/traccar,jon-stumpf/traccar,AnshulJain1985/Roadcast-Tracker,duke2906/traccar,vipien/traccar,jssenyange/traccar,tananaev/traccar,jssenyange/traccar,jssenyange/traccar,renaudallard/traccar,stalien/traccar_test,tsmgeek/traccar,ninioe/traccar,stalien/traccar_test,orcoliver/traccar,ninioe/traccar,al3x1s/traccar,vipien/traccar,jon-stumpf/traccar,ninioe/traccar,tananaev/traccar,AnshulJain1985/Roadcast-Tracker,tsmgeek/traccar,joseant/traccar-1,orcoliver/traccar,5of9/traccar,jon-stumpf/traccar,5of9/traccar | /*
* Copyright 2012 - 2016 Anton Tananaev ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.database;
import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import liquibase.Contexts;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.exception.LiquibaseException;
import liquibase.resource.FileSystemResourceAccessor;
import liquibase.resource.ResourceAccessor;
import org.traccar.Config;
import org.traccar.Context;
import org.traccar.helper.Log;
import org.traccar.model.Device;
import org.traccar.model.DevicePermission;
import org.traccar.model.Event;
import org.traccar.model.Group;
import org.traccar.model.GroupPermission;
import org.traccar.model.Position;
import org.traccar.model.Server;
import org.traccar.model.User;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
public class DataManager implements IdentityManager {
private static final long DEFAULT_REFRESH_DELAY = 300;
private final Config config;
private DataSource dataSource;
private final long dataRefreshDelay;
private final ReadWriteLock devicesLock = new ReentrantReadWriteLock();
private final Map<Long, Device> devicesById = new HashMap<>();
private final Map<String, Device> devicesByUniqueId = new HashMap<>();
private long devicesLastUpdate;
private final ReadWriteLock groupsLock = new ReentrantReadWriteLock();
private final Map<Long, Group> groupsById = new HashMap<>();
private long groupsLastUpdate;
public DataManager(Config config) throws Exception {
this.config = config;
initDatabase();
initDatabaseSchema();
dataRefreshDelay = config.getLong("database.refreshDelay", DEFAULT_REFRESH_DELAY) * 1000;
}
public DataSource getDataSource() {
return dataSource;
}
private void initDatabase() throws Exception {
String jndiName = config.getString("database.jndi");
if (jndiName != null) {
dataSource = (DataSource) new InitialContext().lookup(jndiName);
} else {
String driverFile = config.getString("database.driverFile");
if (driverFile != null) {
URLClassLoader classLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
method.setAccessible(true);
method.invoke(classLoader, new File(driverFile).toURI().toURL());
}
String driver = config.getString("database.driver");
if (driver != null) {
Class.forName(driver);
}
HikariConfig hikariConfig = new HikariConfig();
hikariConfig.setDriverClassName(config.getString("database.driver"));
hikariConfig.setJdbcUrl(config.getString("database.url"));
hikariConfig.setUsername(config.getString("database.user"));
hikariConfig.setPassword(config.getString("database.password"));
hikariConfig.setConnectionInitSql("SELECT 1 FROM DUAL");
hikariConfig.setIdleTimeout(600000);
int maxPoolSize = config.getInteger("database.maxPoolSize");
if (maxPoolSize != 0) {
hikariConfig.setMaximumPoolSize(maxPoolSize);
}
dataSource = new HikariDataSource(hikariConfig);
}
}
private void updateDeviceCache(boolean force) throws SQLException {
boolean needWrite;
devicesLock.readLock().lock();
try {
needWrite = force || System.currentTimeMillis() - devicesLastUpdate > dataRefreshDelay;
} finally {
devicesLock.readLock().unlock();
}
if (needWrite) {
devicesLock.writeLock().lock();
try {
if (force || System.currentTimeMillis() - devicesLastUpdate > dataRefreshDelay) {
devicesById.clear();
devicesByUniqueId.clear();
for (Device device : getAllDevices()) {
devicesById.put(device.getId(), device);
devicesByUniqueId.put(device.getUniqueId(), device);
}
devicesLastUpdate = System.currentTimeMillis();
}
} finally {
devicesLock.writeLock().unlock();
}
}
}
@Override
public Device getDeviceById(long id) {
boolean forceUpdate;
devicesLock.readLock().lock();
try {
forceUpdate = !devicesById.containsKey(id);
} finally {
devicesLock.readLock().unlock();
}
try {
updateDeviceCache(forceUpdate);
} catch (SQLException e) {
Log.warning(e);
}
devicesLock.readLock().lock();
try {
return devicesById.get(id);
} finally {
devicesLock.readLock().unlock();
}
}
@Override
public Device getDeviceByUniqueId(String uniqueId) throws SQLException {
boolean forceUpdate;
devicesLock.readLock().lock();
try {
forceUpdate = !devicesByUniqueId.containsKey(uniqueId) && !config.getBoolean("database.ignoreUnknown");
} finally {
devicesLock.readLock().unlock();
}
updateDeviceCache(forceUpdate);
devicesLock.readLock().lock();
try {
return devicesByUniqueId.get(uniqueId);
} finally {
devicesLock.readLock().unlock();
}
}
private void updateGroupCache(boolean force) throws SQLException {
boolean needWrite;
groupsLock.readLock().lock();
try {
needWrite = force || System.currentTimeMillis() - groupsLastUpdate > dataRefreshDelay;
} finally {
groupsLock.readLock().unlock();
}
if (needWrite) {
groupsLock.writeLock().lock();
try {
if (force || System.currentTimeMillis() - groupsLastUpdate > dataRefreshDelay) {
groupsById.clear();
for (Group group : getAllGroups()) {
groupsById.put(group.getId(), group);
}
groupsLastUpdate = System.currentTimeMillis();
}
} finally {
groupsLock.writeLock().unlock();
}
}
}
public Group getGroupById(long id) {
boolean forceUpdate;
groupsLock.readLock().lock();
try {
forceUpdate = !groupsById.containsKey(id);
} finally {
groupsLock.readLock().unlock();
}
try {
updateGroupCache(forceUpdate);
} catch (SQLException e) {
Log.warning(e);
}
groupsLock.readLock().lock();
try {
return groupsById.get(id);
} finally {
groupsLock.readLock().unlock();
}
}
private String getQuery(String key) {
String query = config.getString(key);
if (query == null) {
Log.info("Query not provided: " + key);
}
return query;
}
private void initDatabaseSchema() throws SQLException, LiquibaseException {
if (config.hasKey("database.changelog")) {
ResourceAccessor resourceAccessor = new FileSystemResourceAccessor();
Database database = DatabaseFactory.getInstance().openDatabase(
config.getString("database.url"),
config.getString("database.user"),
config.getString("database.password"),
null, resourceAccessor);
Liquibase liquibase = new Liquibase(
config.getString("database.changelog"), resourceAccessor, database);
liquibase.clearCheckSums();
liquibase.update(new Contexts());
}
}
public User login(String email, String password) throws SQLException {
User user = QueryBuilder.create(dataSource, getQuery("database.loginUser"))
.setString("email", email)
.executeQuerySingle(User.class);
if (user != null && user.isPasswordValid(password)) {
return user;
} else {
return null;
}
}
public Collection<User> getUsers() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectUsersAll"))
.executeQuery(User.class);
}
public User getUser(long userId) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectUser"))
.setLong("id", userId)
.executeQuerySingle(User.class);
}
public void addUser(User user) throws SQLException {
user.setId(QueryBuilder.create(dataSource, getQuery("database.insertUser"), true)
.setObject(user)
.executeUpdate());
}
public void updateUser(User user) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateUser"))
.setObject(user)
.executeUpdate();
if (user.getHashedPassword() != null) {
QueryBuilder.create(dataSource, getQuery("database.updateUserPassword"))
.setObject(user)
.executeUpdate();
}
}
public void removeUser(long userId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteUser"))
.setLong("id", userId)
.executeUpdate();
}
public Collection<DevicePermission> getDevicePermissions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectDevicePermissions"))
.executeQuery(DevicePermission.class);
}
public Collection<GroupPermission> getGroupPermissions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectGroupPermissions"))
.executeQuery(GroupPermission.class);
}
public Collection<Device> getAllDevices() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectDevicesAll"))
.executeQuery(Device.class);
}
public Collection<Device> getDevices(long userId) throws SQLException {
Collection<Device> devices = new ArrayList<>();
for (long id : Context.getPermissionsManager().getDevicePermissions(userId)) {
devices.add(getDeviceById(id));
}
return devices;
}
public void addDevice(Device device) throws SQLException {
device.setId(QueryBuilder.create(dataSource, getQuery("database.insertDevice"), true)
.setObject(device)
.executeUpdate());
updateDeviceCache(true);
}
public void updateDevice(Device device) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateDevice"))
.setObject(device)
.executeUpdate();
updateDeviceCache(true);
}
public void updateDeviceStatus(Device device) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateDeviceStatus"))
.setObject(device)
.executeUpdate();
Device cachedDevice = getDeviceById(device.getId());
cachedDevice.setStatus(device.getStatus());
cachedDevice.setMotion(device.getMotion());
}
public void removeDevice(long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteDevice"))
.setLong("id", deviceId)
.executeUpdate();
updateDeviceCache(true);
}
public void linkDevice(long userId, long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.linkDevice"))
.setLong("userId", userId)
.setLong("deviceId", deviceId)
.executeUpdate();
}
public void unlinkDevice(long userId, long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.unlinkDevice"))
.setLong("userId", userId)
.setLong("deviceId", deviceId)
.executeUpdate();
}
public Collection<Group> getAllGroups() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectGroupsAll"))
.executeQuery(Group.class);
}
public Collection<Group> getGroups(long userId) throws SQLException {
Collection<Group> groups = new ArrayList<>();
for (long id : Context.getPermissionsManager().getGroupPermissions(userId)) {
groups.add(getGroupById(id));
}
return groups;
}
private void checkGroupCycles(Group group) {
groupsLock.readLock().lock();
try {
Set<Long> groups = new HashSet<>();
while (group != null) {
if (groups.contains(group.getId())) {
throw new IllegalArgumentException("Cycle in group hierarchy");
}
groups.add(group.getId());
group = groupsById.get(group.getGroupId());
}
} finally {
groupsLock.readLock().unlock();
}
}
public void addGroup(Group group) throws SQLException {
checkGroupCycles(group);
group.setId(QueryBuilder.create(dataSource, getQuery("database.insertGroup"), true)
.setObject(group)
.executeUpdate());
updateGroupCache(true);
}
public void updateGroup(Group group) throws SQLException {
checkGroupCycles(group);
QueryBuilder.create(dataSource, getQuery("database.updateGroup"))
.setObject(group)
.executeUpdate();
updateGroupCache(true);
}
public void removeGroup(long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteGroup"))
.setLong("id", groupId)
.executeUpdate();
}
public void linkGroup(long userId, long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.linkGroup"))
.setLong("userId", userId)
.setLong("groupId", groupId)
.executeUpdate();
}
public void unlinkGroup(long userId, long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.unlinkGroup"))
.setLong("userId", userId)
.setLong("groupId", groupId)
.executeUpdate();
}
public Collection<Position> getPositions(long deviceId, Date from, Date to) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectPositions"))
.setLong("deviceId", deviceId)
.setDate("from", from)
.setDate("to", to)
.executeQuery(Position.class);
}
public void addPosition(Position position) throws SQLException {
position.setId(QueryBuilder.create(dataSource, getQuery("database.insertPosition"), true)
.setDate("now", new Date())
.setObject(position)
.executeUpdate());
}
public void updateLatestPosition(Position position) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateLatestPosition"))
.setDate("now", new Date())
.setObject(position)
.executeUpdate();
Device device = getDeviceById(position.getDeviceId());
device.setPositionId(position.getId());
}
public Collection<Position> getLatestPositions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectLatestPositions"))
.executeQuery(Position.class);
}
public Server getServer() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectServers"))
.executeQuerySingle(Server.class);
}
public void updateServer(Server server) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateServer"))
.setObject(server)
.executeUpdate();
}
public Event getEvent(long eventId) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectEvent"))
.setLong("id", eventId)
.executeQuerySingle(Event.class);
}
public void addEvent(Event event) throws SQLException {
event.setId(QueryBuilder.create(dataSource, getQuery("database.insertEvent"), true)
.setObject(event)
.executeUpdate());
}
public Collection<Event> getEvents(long deviceId, String type, Date from, Date to) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectEvents"))
.setLong("deviceId", deviceId)
.setString("type", type)
.setDate("from", from)
.setDate("to", to)
.executeQuery(Event.class);
}
public Collection<Event> getLastEvents(long deviceId, String type, int interval) throws SQLException {
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.SECOND, -interval);
Date to = calendar.getTime();
return getEvents(deviceId, type, new Date(), to);
}
}
| src/org/traccar/database/DataManager.java | /*
* Copyright 2012 - 2016 Anton Tananaev ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.database;
import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.naming.InitialContext;
import javax.sql.DataSource;
import liquibase.Contexts;
import liquibase.Liquibase;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.exception.LiquibaseException;
import liquibase.resource.FileSystemResourceAccessor;
import liquibase.resource.ResourceAccessor;
import org.traccar.Config;
import org.traccar.Context;
import org.traccar.helper.Log;
import org.traccar.model.Device;
import org.traccar.model.DevicePermission;
import org.traccar.model.Event;
import org.traccar.model.Group;
import org.traccar.model.GroupPermission;
import org.traccar.model.Position;
import org.traccar.model.Server;
import org.traccar.model.User;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
public class DataManager implements IdentityManager {
private static final long DEFAULT_REFRESH_DELAY = 300;
private final Config config;
private DataSource dataSource;
private final long dataRefreshDelay;
private final ReadWriteLock devicesLock = new ReentrantReadWriteLock();
private final Map<Long, Device> devicesById = new HashMap<>();
private final Map<String, Device> devicesByUniqueId = new HashMap<>();
private long devicesLastUpdate;
private final ReadWriteLock groupsLock = new ReentrantReadWriteLock();
private final Map<Long, Group> groupsById = new HashMap<>();
private long groupsLastUpdate;
public DataManager(Config config) throws Exception {
this.config = config;
initDatabase();
initDatabaseSchema();
dataRefreshDelay = config.getLong("database.refreshDelay", DEFAULT_REFRESH_DELAY) * 1000;
}
public DataSource getDataSource() {
return dataSource;
}
private void initDatabase() throws Exception {
String jndiName = config.getString("database.jndi");
if (jndiName != null) {
dataSource = (DataSource) new InitialContext().lookup(jndiName);
} else {
String driverFile = config.getString("database.driverFile");
if (driverFile != null) {
URLClassLoader classLoader = (URLClassLoader) ClassLoader.getSystemClassLoader();
Method method = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
method.setAccessible(true);
method.invoke(classLoader, new File(driverFile).toURI().toURL());
}
String driver = config.getString("database.driver");
if (driver != null) {
Class.forName(driver);
}
HikariConfig hikariConfig = new HikariConfig();
hikariConfig.setDriverClassName(config.getString("database.driver"));
hikariConfig.setJdbcUrl(config.getString("database.url"));
hikariConfig.setUsername(config.getString("database.user"));
hikariConfig.setPassword(config.getString("database.password"));
hikariConfig.setConnectionInitSql("SELECT 1 FROM DUAL");
hikariConfig.setIdleTimeout(600000);
int maxPoolSize = config.getInteger("database.maxPoolSize");
if (maxPoolSize != 0) {
hikariConfig.setMaximumPoolSize(maxPoolSize);
}
dataSource = new HikariDataSource(hikariConfig);
}
}
private void updateDeviceCache(boolean force) throws SQLException {
boolean needWrite;
devicesLock.readLock().lock();
try {
needWrite = force || System.currentTimeMillis() - devicesLastUpdate > dataRefreshDelay;
} finally {
devicesLock.readLock().unlock();
}
if (needWrite) {
devicesLock.writeLock().lock();
try {
if (force || System.currentTimeMillis() - devicesLastUpdate > dataRefreshDelay) {
devicesById.clear();
devicesByUniqueId.clear();
for (Device device : getAllDevices()) {
devicesById.put(device.getId(), device);
devicesByUniqueId.put(device.getUniqueId(), device);
}
devicesLastUpdate = System.currentTimeMillis();
}
} finally {
devicesLock.writeLock().unlock();
}
}
}
@Override
public Device getDeviceById(long id) {
boolean forceUpdate;
devicesLock.readLock().lock();
try {
forceUpdate = !devicesById.containsKey(id);
} finally {
devicesLock.readLock().unlock();
}
try {
updateDeviceCache(forceUpdate);
} catch (SQLException e) {
Log.warning(e);
}
devicesLock.readLock().lock();
try {
return devicesById.get(id);
} finally {
devicesLock.readLock().unlock();
}
}
@Override
public Device getDeviceByUniqueId(String uniqueId) throws SQLException {
boolean forceUpdate;
devicesLock.readLock().lock();
try {
forceUpdate = !devicesByUniqueId.containsKey(uniqueId) && !config.getBoolean("database.ignoreUnknown");
} finally {
devicesLock.readLock().unlock();
}
updateDeviceCache(forceUpdate);
devicesLock.readLock().lock();
try {
return devicesByUniqueId.get(uniqueId);
} finally {
devicesLock.readLock().unlock();
}
}
private void updateGroupCache(boolean force) throws SQLException {
boolean needWrite;
groupsLock.readLock().lock();
try {
needWrite = force || System.currentTimeMillis() - groupsLastUpdate > dataRefreshDelay;
} finally {
groupsLock.readLock().unlock();
}
if (needWrite) {
groupsLock.writeLock().lock();
try {
if (force || System.currentTimeMillis() - groupsLastUpdate > dataRefreshDelay) {
groupsById.clear();
for (Group group : getAllGroups()) {
groupsById.put(group.getId(), group);
}
groupsLastUpdate = System.currentTimeMillis();
}
} finally {
groupsLock.writeLock().unlock();
}
}
}
public Group getGroupById(long id) {
boolean forceUpdate;
groupsLock.readLock().lock();
try {
forceUpdate = !groupsById.containsKey(id);
} finally {
groupsLock.readLock().unlock();
}
try {
updateGroupCache(forceUpdate);
} catch (SQLException e) {
Log.warning(e);
}
groupsLock.readLock().lock();
try {
return groupsById.get(id);
} finally {
groupsLock.readLock().unlock();
}
}
private String getQuery(String key) {
String query = config.getString(key);
if (query == null) {
Log.info("Query not provided: " + key);
}
return query;
}
private void initDatabaseSchema() throws SQLException, LiquibaseException {
if (config.hasKey("database.changelog")) {
ResourceAccessor resourceAccessor = new FileSystemResourceAccessor();
Database database = DatabaseFactory.getInstance().openDatabase(
config.getString("database.url"),
config.getString("database.user"),
config.getString("database.password"),
null, resourceAccessor);
Liquibase liquibase = new Liquibase(
config.getString("database.changelog"), resourceAccessor, database);
liquibase.clearCheckSums();
liquibase.update(new Contexts());
}
}
public User login(String email, String password) throws SQLException {
User user = QueryBuilder.create(dataSource, getQuery("database.loginUser"))
.setString("email", email)
.executeQuerySingle(User.class);
if (user != null && user.isPasswordValid(password)) {
return user;
} else {
return null;
}
}
public Collection<User> getUsers() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectUsersAll"))
.executeQuery(User.class);
}
public User getUser(long userId) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectUser"))
.setLong("id", userId)
.executeQuerySingle(User.class);
}
public void addUser(User user) throws SQLException {
user.setId(QueryBuilder.create(dataSource, getQuery("database.insertUser"), true)
.setObject(user)
.executeUpdate());
}
public void updateUser(User user) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateUser"))
.setObject(user)
.executeUpdate();
if (user.getHashedPassword() != null) {
QueryBuilder.create(dataSource, getQuery("database.updateUserPassword"))
.setObject(user)
.executeUpdate();
}
}
public void removeUser(long userId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteUser"))
.setLong("id", userId)
.executeUpdate();
}
public Collection<DevicePermission> getDevicePermissions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectDevicePermissions"))
.executeQuery(DevicePermission.class);
}
public Collection<GroupPermission> getGroupPermissions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectGroupPermissions"))
.executeQuery(GroupPermission.class);
}
public Collection<Device> getAllDevices() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectDevicesAll"))
.executeQuery(Device.class);
}
public Collection<Device> getDevices(long userId) throws SQLException {
Collection<Device> devices = new ArrayList<>();
for (long id : Context.getPermissionsManager().getDevicePermissions(userId)) {
devices.add(getDeviceById(id));
}
return devices;
}
public void addDevice(Device device) throws SQLException {
device.setId(QueryBuilder.create(dataSource, getQuery("database.insertDevice"), true)
.setObject(device)
.executeUpdate());
updateDeviceCache(true);
}
public void updateDevice(Device device) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateDevice"))
.setObject(device)
.executeUpdate();
updateDeviceCache(true);
}
public void updateDeviceStatus(Device device) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateDeviceStatus"))
.setObject(device)
.executeUpdate();
Device cachedDevice = getDeviceById(device.getId());
cachedDevice.setStatus(device.getStatus());
cachedDevice.setMotion(device.getMotion());
}
public void removeDevice(long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteDevice"))
.setLong("id", deviceId)
.executeUpdate();
updateDeviceCache(true);
}
public void linkDevice(long userId, long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.linkDevice"))
.setLong("userId", userId)
.setLong("deviceId", deviceId)
.executeUpdate();
}
public void unlinkDevice(long userId, long deviceId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.unlinkDevice"))
.setLong("userId", userId)
.setLong("deviceId", deviceId)
.executeUpdate();
}
public Collection<Group> getAllGroups() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectGroupsAll"))
.executeQuery(Group.class);
}
public Collection<Group> getGroups(long userId) throws SQLException {
Collection<Group> groups = new ArrayList<>();
for (long id : Context.getPermissionsManager().getGroupPermissions(userId)) {
groups.add(getGroupById(id));
}
return groups;
}
private void checkGroupCycles(Group group) {
groupsLock.readLock().lock();
try {
Set<Long> groups = new HashSet<>();
while (group != null) {
if (groups.contains(group.getId())) {
throw new IllegalArgumentException("Cycle in group hierarchy");
}
groups.add(group.getId());
group = groupsById.get(group.getGroupId());
}
} finally {
groupsLock.readLock().unlock();
}
}
public void addGroup(Group group) throws SQLException {
checkGroupCycles(group);
group.setId(QueryBuilder.create(dataSource, getQuery("database.insertGroup"), true)
.setObject(group)
.executeUpdate());
updateGroupCache(true);
}
public void updateGroup(Group group) throws SQLException {
checkGroupCycles(group);
QueryBuilder.create(dataSource, getQuery("database.updateGroup"))
.setObject(group)
.executeUpdate();
updateGroupCache(true);
}
public void removeGroup(long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.deleteGroup"))
.setLong("id", groupId)
.executeUpdate();
}
public void linkGroup(long userId, long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.linkGroup"))
.setLong("userId", userId)
.setLong("groupId", groupId)
.executeUpdate();
}
public void unlinkGroup(long userId, long groupId) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.unlinkGroup"))
.setLong("userId", userId)
.setLong("groupId", groupId)
.executeUpdate();
}
public Collection<Position> getPositions(long deviceId, Date from, Date to) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectPositions"))
.setLong("deviceId", deviceId)
.setDate("from", from)
.setDate("to", to)
.executeQuery(Position.class);
}
public void addPosition(Position position) throws SQLException {
position.setId(QueryBuilder.create(dataSource, getQuery("database.insertPosition"), true)
.setDate("now", new Date())
.setObject(position)
.executeUpdate());
}
public void updateLatestPosition(Position position) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateLatestPosition"))
.setDate("now", new Date())
.setObject(position)
.executeUpdate();
Device device = getDeviceById(position.getDeviceId());
device.setPositionId(position.getId());
}
public Collection<Position> getLatestPositions() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectLatestPositions"))
.executeQuery(Position.class);
}
public Server getServer() throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectServers"))
.executeQuerySingle(Server.class);
}
public void updateServer(Server server) throws SQLException {
QueryBuilder.create(dataSource, getQuery("database.updateServer"))
.setObject(server)
.executeUpdate();
}
public Event getEvent(long eventId) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectEvent"))
.setLong("id", eventId)
.executeQuerySingle(Event.class);
}
public void addEvent(Event event) throws SQLException {
event.setId(QueryBuilder.create(dataSource, getQuery("database.insertEvent"), true)
.setObject(event)
.executeUpdate());
}
public Collection<Event> getEvents(long deviceId, String type, Date from, Date to) throws SQLException {
return QueryBuilder.create(dataSource, getQuery("database.selectEvents"))
.setLong("deviceId", deviceId)
.setString("type", type)
.setDate("from", from)
.setDate("to", to)
.executeQuery(Event.class);
}
public Collection<Event> getLastEvents(long deviceId, String type, int interval) throws SQLException {
Calendar calendar = Calendar.getInstance();
calendar.add(Calendar.SECOND, -interval);
Date to = calendar.getTime();
return getEvents(deviceId, type, new Date(), to);
}
}
| - Fixed code style. | src/org/traccar/database/DataManager.java | - Fixed code style. | <ide><path>rc/org/traccar/database/DataManager.java
<ide> int maxPoolSize = config.getInteger("database.maxPoolSize");
<ide>
<ide> if (maxPoolSize != 0) {
<del> hikariConfig.setMaximumPoolSize(maxPoolSize);
<add> hikariConfig.setMaximumPoolSize(maxPoolSize);
<ide> }
<ide>
<ide> dataSource = new HikariDataSource(hikariConfig); |
|
JavaScript | mpl-2.0 | 9bcedb638f7178397da03782039c343dc204d8d0 | 0 | TbLtzk/Centaurus,TbLtzk/Centaurus | const reserveChunkCost = 10;
const inflationDestBalanceBuffer = 10;
var centaurusAddress = 'GDJXQYEWDPGYK4LGCLFEV6HBIW3M22IK6NN2WQONHP3ELH6HINIKBVY7';
var poolAddress = 'GA3FUYFOPWZ25YXTCA73RK2UGONHCO27OHQRSGV3VCE67UEPEFEDCOPA';
angular.module('starter.services', ['starter.services.basic'])
.factory('Account', function ($rootScope, UIHelper, Settings, Remote) {
var account;
var keysChanged = false;
var connectionChanged = false;
var paymentsCloseHandle;
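    // resets the cached account to an empty, not-yet-loaded state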
var resetAccount = function () {
account = {
address: 'loading',
isLive: false,
balance: 0,
reserve: 0,
sequence: "0",
transactions: [],
anchors: [],
otherCurrencies: []
};
};
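    // restore the last cached account snapshot from local storage so data is available before horizon answers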
var snapshot = window.localStorage['accountInfo'];
if (snapshot)
account = JSON.parse(snapshot);
else
resetAccount();
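    // convenience wrapper: builds a transaction containing a single operation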
var buildTransaction = function (operation, memo, bSign) {
return buildBatchTransaction([operation], memo, bSign);
};
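    // builds a transaction from several operations using the locally tracked sequence number; signs it with the stored key pair when bSign is true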
var buildBatchTransaction = function (operations, memo, bSign) {
var acc = new StellarSdk.Account(account.address, account.sequence);
var builder = new StellarSdk.TransactionBuilder(acc);
for (var index = 0; index < operations.length; index++) {
var operation = operations[index];
builder = builder.addOperation(operation);
}
if (memo)
builder = builder.addMemo(memo);
var transaction = builder.build();
if (bSign === true)
transaction.sign(Settings.getKeyPair());
return transaction;
};
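    // bumps the locally cached sequence number so the next transaction does not reuse it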
var increaseSequence = function () {
var sdkAcc = new StellarSdk.Account(account.address, account.sequence);
sdkAcc.incrementSequenceNumber();
account.sequence = sdkAcc.sequenceNumber();
}
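    // submits a transaction to horizon; accountIncrease ('onSuccess', 'onError' or 'both', default 'onError') controls
    // when the local sequence number is bumped, and silent routes error messages to the console instead of an alert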
var submitTransaction = function (transaction, accountIncrease, silent) {
if (!accountIncrease)
accountIncrease = 'onError';
var output = function (captionRes, plainSuffix) {
            if (silent)
                console.log(captionRes + (plainSuffix || ''));
else
UIHelper.showAlert(captionRes, plainSuffix);
};
var promise = new Promise(function (resolve, reject) {
Remote.getServer().submitTransaction(transaction)
.then(function (transactionResult) {
console.log(transactionResult);
if (accountIncrease === 'onSuccess' || accountIncrease === 'both')
increaseSequence();
resolve(transactionResult);
})
.catch(function (err) {
console.log(JSON.stringify(err));
if (err.type === 'https://stellar.org/horizon-errors/transaction_failed') {
var errorCode = err.extras && err.extras.result_codes ? err.extras.result_codes.transaction : null;
if (errorCode === "tx_bad_seq") {
output('controllers.send.outOfSync');
Settings.get().onKeysAvailable();
}
else {
var suffix = ' ' + errorCode;
                        var opCode = err.extras && err.extras.result_codes.operations
                            && err.extras.result_codes.operations[0];
                        if (opCode)
                            suffix += ', ' + opCode;
                        output('controllers.send.failed', suffix);
}
}
else {
var msg = err.title;
if (err.extras && err.extras.result_codes)
msg += ': ' + err.extras.result_codes.transaction;
if (!msg)
msg = 'controllers.send.failed.unknown';
output(msg);
}
if (accountIncrease === 'onError' || accountIncrease === 'both')
increaseSequence();
reject(err);
});
});
return promise;
}
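    // points the account's inflation destination at the pool and sets the home domain, but only while the balance safely covers the reserve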
var setInflationDestination = function () {
if (account.balance < account.reserve + inflationDestBalanceBuffer)
return;
var operation = StellarSdk.Operation.setOptions({
inflationDest: poolAddress,
homeDomain: 'centaurus.xcoins.de'
});
var transaction = buildTransaction(operation, null, true);
submitTransaction(transaction, 'both', true);
};
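    // changes the trust limit for several asset codes of a single issuer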
var changeTrustForIssuer = function (issuer, assetCodes, newLimit) {
if (!(assetCodes.length > 0))
return;
var assets = [];
for (var index = 0; index < assetCodes.length; index++) {
var assetCode = assetCodes[index];
var asset = new StellarSdk.Asset(assetCode, issuer);
assets.push(asset);
}
changeTrust(assets, newLimit);
};
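    // submits a single transaction with one changeTrust operation per asset, all using the given limit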
var changeTrust = function (assets, newLimit) {
if (!(assets.length > 0))
return;
var operations = [];
for (var index = 0; index < assets.length; index++) {
var asset = assets[index];
var operation = StellarSdk.Operation.changeTrust({
asset: asset,
limit: newLimit
});
operations.push(operation);
}
var transaction = buildBatchTransaction(operations, null, true);
submitTransaction(transaction);
};
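    // adds the amount to the XLM balance or to the matching otherCurrencies entry, creating that entry if needed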
var addToBalance = function (currency, amount) {
if (currency === 'native' || currency === 'XLM' || currency == null) {
account.balance += amount;
return;
}
for(var index = 0; index < account.otherCurrencies.length; ++index) {
var entry = account.otherCurrencies[index];
if(entry.currency == currency)
{
entry.amount += amount;
return;
}
}
// no entry for currency exists -> add new entry
account.otherCurrencies.push({currency:currency, amount:amount});
};
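    // records that the given issuer account provides the given asset, grouping assets per anchor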
var addAnchorAsset = function (issuer, currency) {
for (var index = 0; index < account.anchors.length; ++index) {
var anchor = account.anchors[index];
if (anchor.accountId == issuer) {
anchor.assets.push(currency);
return;
}
}
// no entry for this issuer exists -> add new entry
account.anchors.push({ accountId: issuer, assets: [currency] });
}
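// Loads the account from Horizon for the current keys: resets local state, rebuilds balances, anchors
// and the reserve, replays the latest effects and then subscribes to the live effects stream.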
var attachToKeys = function () {
var keys = Settings.getKeys();
// initial balances
Remote.getServer().accounts()
.accountId(keys.address)
.call()
.then(function (acc) {
console.log(JSON.stringify(acc));
resetAccount();
account.address = keys.address;
var reserveChunks = 1 + acc.signers.length; // minimum reserve
for (var i = 0; i < acc.balances.length; i++) {
var bal = acc.balances[i];
var amount = parseFloat(bal.balance);
if (bal.asset_code) {
reserveChunks++;
addAnchorAsset(bal.asset_issuer, bal.asset_code);
}
addToBalance(bal.asset_code, amount);
}
account.sequence = acc.sequence;
if (acc.offers && acc.offers.length) {
for (var i = 0; i < acc.offers.length; i++) {
var offer = acc.offers[i];
if (offer)
reserveChunks++;
}
}
account.reserve = reserveChunks * reserveChunkCost;
if((acc.inflation_destination !== poolAddress) || !acc.home_domain)
setInflationDestination();
window.localStorage['accountInfo'] = JSON.stringify(account);
account.isLive = true;
$rootScope.$broadcast('accountInfoLoaded');
})
.catch(StellarSdk.NotFoundError, function (err) {
console.log("account not found");
resetAccount();
account.address = keys.address;
window.localStorage['accountInfo'] = JSON.stringify(account);
account.isLive = true;
$rootScope.$broadcast('accountInfoLoaded');
//Remote.getServer().friendbot(keys.address).call();
})
.catch(function (err) {
console.log(err.stack || err);
})
.then(function () {
if (account.address !== keys.address) {
resetAccount();
account.address = keys.address;
}
})
var applyToBalance = function (effect) {
if (effect.type === 'account_created')
addToBalance(effect.asset_code, parseFloat(effect.starting_balance));
else if (effect.type === 'account_debited')
addToBalance(effect.asset_code, -parseFloat(effect.amount));
else if (effect.type === 'account_credited')
addToBalance(effect.asset_code, parseFloat(effect.amount));
};
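// Maps a payment-like effect (plus its operation and transaction) to a display record and inserts it
// into account.transactions in newest-first order; throws if the effect was already seen.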
var insertTransaction = function (trx, op, effect, fromStream) {
var asset = effect.asset_code || 'XLM';
var date = new Date(trx.created_at);
var displayEffect = {
trxId : trx.id,
effectId : effect.paging_token,
creationDate: date,
creationTimestamp : date.getTime(),
asset_code: asset,
amount: effect.amount,
debit: effect.type === 'account_debited',
sender: op.from,
receiver: op.to,
memo: trx.memo,
memoType: trx.memo_type
}
if (op.type === 'create_account') {
displayEffect.amount = op.starting_balance;
displayEffect.sender = op.funder;
displayEffect.receiver = op.account;
}
if (fromStream && account.address === trx.source_account)
account.sequence = trx.source_account_sequence;
// insert at correct position
var i;
for (i = 0; i < account.transactions.length; i++) {
var compareEffect = account.transactions[i];
if (displayEffect.effectId === compareEffect.effectId)
throw new Error('transaction already seen: ' + displayEffect.effectId);
if (displayEffect.creationTimestamp > compareEffect.creationTimestamp) {
break;
}
}
account.transactions.splice(i, 0, displayEffect);
return displayEffect;
};
var insertEffect = function (effect, fromStream) {
var promise = new Promise(function(resolve, reject) {
try {
effect.operation()
.then(function (op) {
op.transaction()
.then(function (trx) {
try {
var displayEffect = insertTransaction(trx, op, effect, fromStream);
resolve(displayEffect);
}
catch (err) {
reject(err);
}
});
})
}
catch(err) {
reject(err);
}
});
return promise;
};
var detachFromPaymentsStream = function () {
if (!paymentsCloseHandle)
return;
console.log('close open effects stream')
paymentsCloseHandle();
paymentsCloseHandle = undefined;
};
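// (Re)opens the server-sent effects stream for this account, optionally resuming from a paging token.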
var attachToPaymentsStream = function (opt_startFrom) {
var futurePayments = Remote.getServer().effects().forAccount(keys.address);
if (opt_startFrom) {
futurePayments = futurePayments.cursor(opt_startFrom);
}
if (paymentsCloseHandle)
detachFromPaymentsStream();
paymentsCloseHandle = futurePayments.stream({
onmessage: function (effect) {
effectHandler(effect, true);
},
onerror: function (error) {
account.isLive = false;
console.log(JSON.stringify(error));
}
});
};
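// Dispatches a single effect: trustline changes trigger a full reload, balance-affecting effects are
// inserted into the transaction list and, when streamed live, applied to the balance.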
var effectHandler = function (effect, fromStream) {
console.log(effect);
if (fromStream){
var reload =
effect.type === 'trustline_updated'
|| effect.type === 'trustline_created'
|| effect.type === 'trustline_removed';
if (reload) {
detachFromPaymentsStream();
Settings.get().onKeysAvailable();
}
}
var isRelevant =
effect.type === 'account_created'
|| effect.type === 'account_debited'
|| effect.type === 'account_credited'
;
if(isRelevant) {
insertEffect(effect, fromStream)
.then(function (displayEffect) {
if (fromStream) {
applyToBalance(effect);
$rootScope.$broadcast('accountInfoLoaded');
}
else {
$rootScope.$broadcast('newTransaction');
}
}, function (err) {
console.error(err)
});
}
};
Remote.getServer().effects()
.forAccount(keys.address)
.limit(30)
.order('desc')
.call()
.then(function (effectResults) {
var length = effectResults.records ? effectResults.records.length : 0;
for (var index = length - 1; index >= 0; index--) {
var currentEffect = effectResults.records[index];
effectHandler(currentEffect, false);
}
var startListeningFrom;
if (length > 0) {
var latestPayment = effectResults.records[0];
startListeningFrom = latestPayment.paging_token;
}
attachToPaymentsStream(startListeningFrom);
})
.catch(function (err) {
attachToPaymentsStream('now');
console.log(err)
});
};
Settings.get().onKeysAvailable = function () {
keysChanged = true;
};
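// Periodic watchdog: re-attaches to Horizon whenever the keys or the connection state have changed.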
var healthCheck = function(){
var keys = Settings.getKeys();
if(!keys)
Settings.get().init();
if(!Remote.isConnected())
{
connectionChanged = true;
}
if((keysChanged || connectionChanged) && keys && Remote.isConnected())
{
attachToKeys();
keysChanged = false;
connectionChanged = false;
}
}
healthCheck();
setInterval(healthCheck, 3000);
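// Public API of the Account factory. Illustrative usage from a controller (names are assumptions):
// var op = StellarSdk.Operation.payment({ destination: dest, asset: StellarSdk.Asset.native(), amount: '10' });
// Account.submitTransaction(Account.buildTransaction(op, null, true), 'onSuccess');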
return {
get : function () {
return account;
},
getAvailableLumen : function (){
return account.balance - account.reserve;
},
buildTransaction: buildTransaction,
submitTransaction: submitTransaction,
changeTrust: changeTrust,
reload: function () {
Settings.get().onKeysAvailable()
}
}
})
.factory('Contacts', function () {
// contact names are considered an id and have to be unique
var contacts = [
{ name: 'Centaurus donation address', address: centaurusAddress, memo: null, memoType: null }
];
var contactsString = window.localStorage['contacts001'];
if (contactsString)
contacts = JSON.parse(contactsString);
return {
save: function(){
var contactsString = JSON.stringify(contacts);
window.localStorage['contacts001'] = contactsString;
},
getAll: function () {
return contacts;
},
findIndex: function (name) {
if (!name)
return -1;
var normalized = name.toUpperCase();
for (var i = 0; i < contacts.length; i++) {
if (contacts[i].name.toUpperCase() === normalized)
return i;
}
return -1;
},
find: function (name) {
var index = this.findIndex(name);
if (index < 0)
return null;
return contacts[index];
},
findReverse: function(address, memo){
var bestMatch = null;
for (var i = 0; i < contacts.length; i++) {
if (contacts[i].address === address) {
if (contacts[i].memo === memo)
return contacts[i];
else if(!bestMatch)
bestMatch = contacts[i];
}
}
return bestMatch;
},
add: function (name, address, memo, memoType) {
if (!name)
return false;
if (this.find(name) != null)
return false;
contacts.push({ name: name, address: address, memo: memo, memoType: memoType });
this.save();
return true;
},
removeAt: function(index){
if (index < 0)
return false;
contacts.splice(index, 1);
this.save();
return true;
},
remove: function(c){
if (!c)
return false;
var index = contacts.indexOf(c);
return this.removeAt(index);
},
removeByName: function (name) {
return this.removeAt(this.findIndex(name));
}
}
})
.factory('Commands', function ($http, UIHelper, Settings, Account) {
if (typeof String.prototype.startsWith != 'function') {
String.prototype.startsWith = function (str){
return this.slice(0, str.length) == str;
};
}
var knownCommands = [];
knownCommands.add = function(commandName, callback){
knownCommands.push( { name: commandName, callback: callback } );
};
var importKeys = function(newKeys){
var oldKeys = Settings.getKeys();
if(oldKeys.address == newKeys.address && oldKeys.secret == newKeys.secret) {
UIHelper.showAlert('services.commands.importKeys.noChange');
}
else {
var doOverwrite = function(){
Settings.setKeys(newKeys.address, newKeys.secret);
UIHelper.showAlert('services.commands.importKeys.ok');
};
if(Account.get().balance > 0) {
UIHelper.confirmAndRun(
'service.commands.importKeys.overwrite.caption',
'service.commands.importKeys.overwrite.text',
doOverwrite
);
}
else{
doOverwrite();
}
}
return true;
}
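// Signs an upgrade request with the old (pre-upgrade) key pair and posts it to the stellar.org upgrade
// endpoint so the STR balance is credited to the new address. Note that the return values inside the
// $http callbacks are not propagated to the caller.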
var redeemStr = function (oldSecret, onSuccess) {
try {
var newKeys = Settings.getKeys();
//var newKeys = {
// address: 'GALYYRH5XCRLVQ3W56PNEZHRV37GY3VFRRFUYU4NNDKOGUAB22OQPUX4',
// secret: 'SDL3VTYAPQCOJDKA34WGXOIJA4RRQ6TAF5NJSVI77KEKP22L2GLIM6GN'
//};
//oldSecret = 'sfmB34AMuAPrgbgeFJ7iXxi14NaKxQfcXoEex3p4TqekAgvinha';
var data = JSON.stringify({
newAddress: newKeys.address
});
var keypair = StellarSdk.Keypair.fromBase58Seed(oldSecret);
var publicKey = nacl.util.encodeBase64(keypair.rawPublicKey());
var signatureRaw = keypair.sign(data);
var signature = nacl.util.encodeBase64(signatureRaw);
var message = {
data: data,
publicKey: publicKey,
signature: signature
};
$http.post('https://api.stellar.org/upgrade/upgrade', message).then(function (resp) {
// For JSON responses, resp.data contains the result
console.log('Success', resp);
if(onSuccess)
onSuccess(resp);
return true;
}, function (err) {
// err.status will contain the status code
if (err.data && err.data.message)
UIHelper.showAlert(err.data.message);
else
UIHelper.showAlert(JSON.stringify(err));
return false;
});
} catch (err) {
UIHelper.showAlert(err.message);
return false;
}
}
var redeemStrCallback = function (content) {
var oldSecret = content;
var onSuccess = function (resp) {
UIHelper.showAlert('Your STR will be converted to XLM! You might need to close and reopen Centaurus.');
};
return redeemStr(oldSecret, onSuccess);
};
knownCommands.add('redeemSTR001', redeemStrCallback);
var backupCallback = function(content){
var unmasked = atob(content);
var newKeys = JSON.parse(unmasked);
return redeemStrCallback(newKeys.secret);
};
knownCommands.add('backup001', backupCallback);
var backupCallback2 = function(content){
UIHelper.promptForPassword(function(pwd){
try{
var decrypted = CryptoJS.AES.decrypt(content, pwd).toString(CryptoJS.enc.Utf8);
var newKeys = JSON.parse(decrypted);
return redeemStrCallback(newKeys.secret);
} catch (ex) {
console.log(ex.message);
}
UIHelper.showAlert('services.commands.backup.incorrectPwd');
return false;
});
};
knownCommands.add('backup002', backupCallback2);
var backupCallback3 = function (content) {
UIHelper.promptForPassword(function (pwd) {
try {
var decrypted = CryptoJS.AES.decrypt(content, pwd).toString(CryptoJS.enc.Utf8);
var newKeys = JSON.parse(decrypted);
return importKeys(newKeys);
} catch (ex) {
console.log(ex.message);
}
UIHelper.showAlert('services.commands.backup.incorrectPwd');
return false;
});
};
knownCommands.add('backup003', backupCallback3);
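// Public API. Typical flow (illustrative): parse scanned input first, then run the raw command,
// e.g. var parsed = Commands.parse(scannedText); if (parsed.isCommand) Commands.execute(parsed.rawCommand);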
return {
parse : function (input) {
var result = {
isCommand : false,
rawCommand: ''
}
if(!input)
return result;
var normalized = input.replace('\\:', ':');
if(normalized.startsWith('centaurus:')){
result.isCommand = true;
result.rawCommand = normalized.substring(10);
}
return result;
},
execute : function (rawCommand) {
var result = {
success : false,
commandName : 'unknownCommand'
}
for (var i=0; i < knownCommands.length; i++) {
var command = knownCommands[i];
if(rawCommand.startsWith(command.name)) {
result.commandName = command.name;
result.success = command.callback(rawCommand.substring(command.name.length));
}
}
return result;
},
importAddressAndSecret : function (addr, s){
var newKeys = {
address : addr,
secret : s
};
return importKeys(newKeys);
},
upgradeFromStr: redeemStr
};
})
| www/js/services/services.js | const reserveChunkCost = 10;
const inflationDestBalanceBuffer = 10;
var centaurusAddress = 'GDJXQYEWDPGYK4LGCLFEV6HBIW3M22IK6NN2WQONHP3ELH6HINIKBVY7';
var poolAddress = 'GA3FUYFOPWZ25YXTCA73RK2UGONHCO27OHQRSGV3VCE67UEPEFEDCOPA';
angular.module('starter.services', ['starter.services.basic'])
.factory('Account', function ($rootScope, UIHelper, Settings, Remote) {
var account;
var keysChanged = false;
var connectionChanged = false;
var paymentsCloseHandle;
var resetAccount = function () {
account = {
address: 'loading',
isLive: false,
balance: 0,
reserve: 0,
sequence: "0",
transactions: [],
anchors: [],
otherCurrencies: []
};
};
var snapshot = window.localStorage['accountInfo'];
if (snapshot)
account = JSON.parse(snapshot);
else
resetAccount();
var buildTransaction = function (operation, memo, bSign) {
return buildBatchTransaction([operation], memo, bSign);
};
var buildBatchTransaction = function (operations, memo, bSign) {
var acc = new StellarSdk.Account(account.address, account.sequence);
var builder = new StellarSdk.TransactionBuilder(acc);
for (var index = 0; index < operations.length; index++) {
var operation = operations[index];
builder = builder.addOperation(operation);
}
if (memo)
builder = builder.addMemo(memo);
var transaction = builder.build();
if (bSign === true)
transaction.sign(Settings.getKeyPair());
return transaction;
};
var increaseSequence = function () {
var sdkAcc = new StellarSdk.Account(account.address, account.sequence);
sdkAcc.incrementSequenceNumber();
account.sequence = sdkAcc.sequenceNumber();
}
var submitTransaction = function (transaction, accountIncrease, silent) {
if (!accountIncrease)
accountIncrease = 'onError';
var output = function (captionRes, plainSuffix) {
if(silent)
console.log(captionRes + plainSuffix);
else
UIHelper.showAlert(captionRes, plainSuffix);
};
var promise = new Promise(function (resolve, reject) {
Remote.getServer().submitTransaction(transaction)
.then(function (transactionResult) {
console.log(transactionResult);
if (accountIncrease === 'onSuccess' || accountIncrease === 'both')
increaseSequence();
resolve(transactionResult);
})
.catch(function (err) {
console.log(JSON.stringify(err));
if (err.type === 'https://stellar.org/horizon-errors/transaction_failed') {
var errorCode = err.extras && err.extras.result_codes ? err.extras.result_codes.transaction : null;
if (errorCode === "tx_bad_seq") {
output('controllers.send.outOfSync');
Settings.get().onKeysAvailable();
}
else {
var suffix = ' ' + errorCode;
var opCode = err.extras && err.extras.result_codes.operations[0];
if (opCode)
suffix += ', ' + opCode
output('controllers.send.failed ', suffix);
}
}
else {
var msg = err.title;
if (err.extras && err.extras.result_codes)
msg += ': ' + err.extras.result_codes.transaction;
if (!msg)
msg = 'controllers.send.failed.unknown';
output(msg);
}
if (accountIncrease === 'onError' || accountIncrease === 'both')
increaseSequence();
reject(err);
});
});
return promise;
}
var setInflationDestination = function () {
if (account.balance < account.reserve + inflationDestBalanceBuffer)
return;
var operation = StellarSdk.Operation.setOptions({
inflationDest: poolAddress,
homeDomain: 'centaurus.xcoins.de'
});
var transaction = buildTransaction(operation, null, true);
submitTransaction(transaction, 'both', true);
};
var changeTrustForIssuer = function (issuer, assetCodes, newLimit) {
if (!(assetCodes.length > 0))
return;
var assets = [];
for (var index = 0; index < assetCodes.length; index++) {
var assetCode = assetCodes[index];
var asset = new StellarSdk.Asset(assetCode, issuer);
assets.push(asset);
}
changeTrust(assets, newLimit);
};
var changeTrust = function (assets, newLimit) {
if (!(assets.length > 0))
return;
var operations = [];
for (var index = 0; index < assets.length; index++) {
var asset = assets[index];
var operation = StellarSdk.Operation.changeTrust({
asset: asset,
limit: newLimit
});
operations.push(operation);
}
var transaction = buildBatchTransaction(operations, null, true);
submitTransaction(transaction);
};
var addToBalance = function (currency, amount) {
if (currency === 'native' || currency === 'XLM' || currency == null) {
account.balance += amount;
return;
}
for(var index = 0; index < account.otherCurrencies.length; ++index) {
var entry = account.otherCurrencies[index];
if(entry.currency == currency)
{
entry.amount += amount;
return;
}
}
// no entry for currency exists -> add new entry
account.otherCurrencies.push({currency:currency, amount:amount});
};
var addAnchorAsset = function (issuer, currency) {
for (var index = 0; index < account.anchors.length; ++index) {
var anchor = account.anchors[index];
if (anchor.accountId == issuer) {
anchor.assets.push(currency);
return;
}
}
// no entry for this issuer exists -> add new entry
account.anchors.push({ accountId: issuer, assets: [currency] });
}
var attachToKeys = function () {
var keys = Settings.getKeys();
// initial balances
Remote.getServer().accounts()
.accountId(keys.address)
.call()
.then(function (acc) {
console.log(JSON.stringify(acc));
resetAccount();
account.address = keys.address;
var reserveChunks = 1 + acc.signers.length; // minimum reserve
for (i = 0; i < acc.balances.length; i++){
var bal = acc.balances[i];
var amount = parseFloat(bal.balance);
if (bal.asset_code) {
reserveChunks++;
addAnchorAsset(bal.asset_issuer, bal.asset_code);
}
addToBalance(bal.asset_code, amount);
}
account.sequence = acc.sequence;
if (acc.offers && acc.offers.length) {
for (i = 0; i < acc.offers.length; i++) {
var offer = acc.offers[i];
if (offer)
reserveChunks++;
}
}
account.reserve = reserveChunks * reserveChunkCost;
if((acc.inflation_destination !== poolAddress) || !acc.home_domain)
setInflationDestination();
window.localStorage['accountInfo'] = JSON.stringify(account);
account.isLive = true;
$rootScope.$broadcast('accountInfoLoaded');
})
.catch(StellarSdk.NotFoundError, function (err) {
console.log("account not found");
resetAccount();
account.address = keys.address;
window.localStorage['accountInfo'] = JSON.stringify(account);
account.isLive = true;
$rootScope.$broadcast('accountInfoLoaded');
//Remote.getServer().friendbot(keys.address).call();
})
.catch(function (err) {
console.log(err.stack || err);
})
.then(function () {
if (account.address !== keys.address) {
resetAccount();
account.address = keys.address;
}
})
var applyToBalance = function (effect) {
if (effect.type === 'account_created')
addToBalance(effect.asset_code, parseFloat(effect.starting_balance));
else if (effect.type === 'account_debited')
addToBalance(effect.asset_code, -parseFloat(effect.amount));
else if (effect.type === 'account_credited')
addToBalance(effect.asset_code, parseFloat(effect.amount));
};
var insertTransaction = function (trx, op, effect, fromStream) {
var asset = effect.asset_code;
if (asset === null || !asset)
asset = 'XLM'
else
asset = effect.asset_code;
var date = new Date(trx.created_at)
var displayEffect = {
trxId : trx.id,
effectId : effect.paging_token,
creationDate: date,
creationTimestamp : date.getTime(),
asset_code: asset,
amount: effect.amount,
debit: effect.type === 'account_debited',
sender: op.from,
receiver: op.to,
memo: trx.memo,
memoType: trx.memo_type
}
if (op.type === 'create_account') {
displayEffect.amount = op.starting_balance;
displayEffect.sender = op.funder;
displayEffect.receiver = op.account;
}
if (fromStream && account.address === trx.source_account)
account.sequence = trx.source_account_sequence;
// insert at correct position
var i;
for (i = 0; i < account.transactions.length; i++) {
var compareEffect = account.transactions[i];
if (displayEffect.effectId === compareEffect.effectId)
throw 'transaction already seen: ' + displayEffect.effectId;
if (displayEffect.creationTimestamp > compareEffect.creationTimestamp) {
break;
}
}
account.transactions.splice(i, 0, displayEffect);
return displayEffect;
};
var insertEffect = function (effect, fromStream) {
var promise = new Promise(function(resolve, reject) {
try {
effect.operation()
.then(function (op) {
op.transaction()
.then(function (trx) {
try {
var displayEffect = insertTransaction(trx, op, effect, fromStream);
resolve(displayEffect);
}
catch (err) {
reject(err);
}
});
})
}
catch(err) {
reject(err);
}
});
return promise;
};
var detachFromPaymentsStream = function () {
if (!paymentsCloseHandle)
return;
console.log('close open effects stream')
paymentsCloseHandle();
paymentsCloseHandle = undefined;
};
var attachToPaymentsStream = function (opt_startFrom) {
var futurePayments = Remote.getServer().effects().forAccount(keys.address);
if (opt_startFrom) {
futurePayments = futurePayments.cursor(opt_startFrom);
}
if (paymentsCloseHandle)
detachFromPaymentsStream();
paymentsCloseHandle = futurePayments.stream({
onmessage: function (effect) {
effectHandler(effect, true);
},
onerror: function (error) {
account.isLive = false;
console.log(JSON.stringify(error));
}
});
};
var effectHandler = function (effect, fromStream) {
console.log(effect);
if (fromStream){
var reload =
effect.type === 'trustline_updated'
|| effect.type === 'trustline_created'
|| effect.type === 'trustline_removed';
if (reload) {
detachFromPaymentsStream();
Settings.get().onKeysAvailable();
}
}
var isRelevant =
effect.type === 'account_created'
|| effect.type === 'account_debited'
|| effect.type === 'account_credited'
;
if(isRelevant) {
insertEffect(effect, fromStream)
.then(function (displayEffect) {
if (fromStream) {
applyToBalance(effect);
$rootScope.$broadcast('accountInfoLoaded');
}
else {
$rootScope.$broadcast('newTransaction');
}
}, function (err) {
console.error(err)
});
}
};
Remote.getServer().effects()
.forAccount(keys.address)
.limit(30)
.order('desc')
.call()
.then(function (effectResults) {
var length = effectResults.records ? effectResults.records.length : 0;
for (index = length-1; index >= 0; index--) {
var currentEffect = effectResults.records[index];
effectHandler(currentEffect, false);
}
var startListeningFrom;
if (length > 0) {
latestPayment = effectResults.records[0];
startListeningFrom = latestPayment.paging_token;
}
attachToPaymentsStream(startListeningFrom);
})
.catch(function (err) {
attachToPaymentsStream('now');
console.log(err)
});
};
Settings.get().onKeysAvailable = function () {
keysChanged = true;
};
var healthCheck = function(){
var keys = Settings.getKeys();
if(!keys)
Settings.get().init();
if(!Remote.isConnected())
{
connectionChanged = true;
}
if((keysChanged || connectionChanged) && keys && Remote.isConnected())
{
attachToKeys();
keysChanged = false;
connectionChanged = false;
}
}
healthCheck();
setInterval(healthCheck, 3000);
return {
get : function () {
return account;
},
getAvailableLumen : function (){
return account.balance - account.reserve;
},
buildTransaction: buildTransaction,
submitTransaction: submitTransaction,
changeTrust: changeTrust,
reload: function () {
Settings.get().onKeysAvailable()
}
}
})
.factory('Contacts', function () {
// contact names are considered an id and have to be unique
var contacts = [
{ name: 'Centaurus', address: centaurusAddress, memo: null, memoType: null }
];
var contactsString = window.localStorage['contacts001'];
if (contactsString)
contacts = JSON.parse(contactsString);
return {
save: function(){
var contactsString = JSON.stringify(contacts);
window.localStorage['contacts001'] = contactsString;
},
getAll: function () {
return contacts;
},
findIndex: function (name) {
if (!name)
return -1;
var normalized = name.toUpperCase();
for (var i = 0; i < contacts.length; i++) {
if (contacts[i].name.toUpperCase() === normalized)
return i;
}
return -1;
},
find: function (name) {
var index = this.findIndex(name);
if (index < 0)
return null;
return contacts[index];
},
findReverse: function(address, memo){
var bestMatch = null;
for (var i = 0; i < contacts.length; i++) {
if (contacts[i].address === address) {
if (contacts[i].memo === memo)
return contacts[i];
else if(!bestMatch)
bestMatch = contacts[i];
}
}
return bestMatch;
},
add: function (name, address, memo, memoType) {
if (!name)
return false;
if (this.find(name) != null)
return false;
contacts.push({ name: name, address: address, memo: memo, memoType: memoType });
this.save();
return true;
},
removeAt: function(index){
if (index < 0)
return false;
contacts.splice(index, 1);
this.save();
return true;
},
remove: function(c){
if (!c)
return false;
var index = contacts.indexOf(c);
return this.removeAt(index);
},
removeByName: function (name) {
return this.removeAt(this.findIndex(name));
}
}
})
.factory('Commands', function ($http, UIHelper, Settings, Account) {
if (typeof String.prototype.startsWith != 'function') {
String.prototype.startsWith = function (str){
return this.slice(0, str.length) == str;
};
}
var knownCommands = [];
knownCommands.add = function(commandName, callback){
knownCommands.push( { name: commandName, callback: callback } );
};
var importKeys = function(newKeys){
var oldKeys = Settings.getKeys();
if(oldKeys.address == newKeys.address && oldKeys.secret == newKeys.secret) {
UIHelper.showAlert('services.commands.importKeys.noChange');
}
else {
var doOverwrite = function(){
Settings.setKeys(newKeys.address, newKeys.secret);
UIHelper.showAlert('services.commands.importKeys.ok');
};
if(Account.get().balance > 0) {
UIHelper.confirmAndRun(
'service.commands.importKeys.overwrite.caption',
'service.commands.importKeys.overwrite.text',
doOverwrite
);
}
else{
doOverwrite();
}
}
return true;
}
var redeemStr = function (oldSecret, onSuccess) {
try {
var newKeys = Settings.getKeys();
//var newKeys = {
// address: 'GALYYRH5XCRLVQ3W56PNEZHRV37GY3VFRRFUYU4NNDKOGUAB22OQPUX4',
// secret: 'SDL3VTYAPQCOJDKA34WGXOIJA4RRQ6TAF5NJSVI77KEKP22L2GLIM6GN'
//};
//oldSecret = 'sfmB34AMuAPrgbgeFJ7iXxi14NaKxQfcXoEex3p4TqekAgvinha';
var data = JSON.stringify({
newAddress: newKeys.address
});
var keypair = StellarSdk.Keypair.fromBase58Seed(oldSecret);
var publicKey = nacl.util.encodeBase64(keypair.rawPublicKey());
var signatureRaw = keypair.sign(data);
var signature = nacl.util.encodeBase64(signatureRaw);
var message = {
data: data,
publicKey: publicKey,
signature: signature
};
$http.post('https://api.stellar.org/upgrade/upgrade', message).then(function (resp) {
// For JSON responses, resp.data contains the result
console.log('Success', resp);
if(onSuccess)
onSuccess(resp);
return true;
}, function (err) {
// err.status will contain the status code
if (err.data && err.data.message)
UIHelper.showAlert(err.data.message);
else
UIHelper.showAlert(JSON.stringify(err));
return false;
});
} catch (err) {
UIHelper.showAlert(err.message);
return false;
}
}
var redeemStrCallback = function (content) {
var oldSecret = content;
var onSuccess = function (resp) {
UIHelper.showAlert('Your STR will be converted to XLM! You might need to close and reopen Centaurus.');
};
return redeemStr(oldSecret, onSuccess);
};
knownCommands.add('redeemSTR001', redeemStrCallback);
var backupCallback = function(content){
var unmasked = atob(content);
var newKeys = JSON.parse(unmasked);
return redeemStrCallback(newKeys.secret);
};
knownCommands.add('backup001', backupCallback);
var backupCallback2 = function(content){
UIHelper.promptForPassword(function(pwd){
try{
var decrypted = CryptoJS.AES.decrypt(content, pwd).toString(CryptoJS.enc.Utf8);
var newKeys = JSON.parse(decrypted);
return redeemStrCallback(newKeys.secret);
} catch (ex) {
console.log(ex.message);
}
UIHelper.showAlert('services.commands.backup.incorrectPwd');
return false;
});
};
knownCommands.add('backup002', backupCallback2);
var backupCallback3 = function (content) {
UIHelper.promptForPassword(function (pwd) {
try {
var decrypted = CryptoJS.AES.decrypt(content, pwd).toString(CryptoJS.enc.Utf8);
var newKeys = JSON.parse(decrypted);
return importKeys(newKeys);
} catch (ex) {
console.log(ex.message);
}
UIHelper.showAlert('services.commands.backup.incorrectPwd');
return false;
});
};
knownCommands.add('backup003', backupCallback3);
return {
parse : function (input) {
var result = {
isCommand : false,
rawCommand: ''
}
if(!input)
return result;
var normalized = input.replace('\\:', ':');
if(normalized.startsWith('centaurus:')){
result.isCommand = true;
result.rawCommand = normalized.substring(10);
}
return result;
},
execute : function (rawCommand) {
var result = {
success : false,
commandName : 'unknownCommand'
}
for (var i=0; i < knownCommands.length; i++) {
var command = knownCommands[i];
if(rawCommand.startsWith(command.name)) {
result.commandName = command.name;
result.success = command.callback(rawCommand.substring(command.name.length));
}
}
},
importAddressAndSecret : function (addr, s){
var newKeys = {
address : addr,
secret : s
};
return importKeys(newKeys);
},
upgradeFromStr: redeemStr
};
})
| clarify on centaurus donation address
| www/js/services/services.js | clarify on centaurus donation address | <ide><path>ww/js/services/services.js
<ide> .factory('Contacts', function () {
<ide> // contact names are considered an id and have to be unique
<ide> var contacts = [
<del> { name: 'Centaurus', address: centaurusAddress, memo: null, memoType: null }
<add> { name: 'Centaurus donation address', address: centaurusAddress, memo: null, memoType: null }
<ide> ];
<ide>
<ide> var contactsString = window.localStorage['contacts001']; |